From 9c81c52bbad07d2a1a1c57dd01d21c9636f4ad7e Mon Sep 17 00:00:00 2001
From: Francois Campbell
Date: Fri, 23 Aug 2024 13:03:32 -0400
Subject: [PATCH] SNOW-1636849 Auto-teardown Native App in integration tests (#1478)

Changes `with project_directory()` to `with nativeapp_project_directory()`,
which automatically runs `snow app teardown` before exiting the project.
This allows us to remove the `try`/`finally` in most tests. For tests that
were using `with pushd(test_project)`, this has been changed to
`with nativeapp_teardown()`, which is what `with nativeapp_project_directory()`
uses under the hood.
---
 tests_integration/nativeapp/conftest.py       |  63 +++
 tests_integration/nativeapp/test_bundle.py    |   4 +-
 .../nativeapp/test_debug_mode.py              |  12 +-
 tests_integration/nativeapp/test_deploy.py    | 494 +++++++-----------
 tests_integration/nativeapp/test_events.py    |  14 +-
 tests_integration/nativeapp/test_init_run.py  | 258 ++++-----
 .../nativeapp/test_large_upload.py            |   1 -
 tests_integration/nativeapp/test_open.py      |  28 +-
 .../nativeapp/test_post_deploy.py             |   4 +-
 .../nativeapp/test_project_templating.py      |  75 +--
 tests_integration/nativeapp/test_teardown.py  | 204 ++++----
 tests_integration/nativeapp/test_validate.py  |  38 +-
 tests_integration/nativeapp/test_version.py   |  75 +--
 13 files changed, 498 insertions(+), 772 deletions(-)
 create mode 100644 tests_integration/nativeapp/conftest.py

diff --git a/tests_integration/nativeapp/conftest.py b/tests_integration/nativeapp/conftest.py
new file mode 100644
index 0000000000..6b909b4d18
--- /dev/null
+++ b/tests_integration/nativeapp/conftest.py
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+from contextlib import contextmanager
+from pathlib import Path
+from typing import Any
+
+import pytest
+
+from tests_integration.conftest import SnowCLIRunner
+
+
+@pytest.fixture
+def nativeapp_project_directory(project_directory, nativeapp_teardown):
+    """Wrapper around the project_directory fixture specific to Native App testing.
+
+    This fixture provides a context manager that does the following:
+    - Automatically calls `snow app teardown` before exiting
+
+    Parameters for the returned context manager:
+    :param name: The name of the directory in tests_integration/test_data/projects to use.
+    """
+
+    @contextmanager
+    def _nativeapp_project_directory(name):
+        with project_directory(name) as d:
+            with nativeapp_teardown(project_dir=d):
+                yield d
+
+    return _nativeapp_project_directory
+
+
+@pytest.fixture
+def nativeapp_teardown(runner: SnowCLIRunner):
+    """Runs `snow app teardown` before exiting.
+
+    This fixture provides a context manager that runs
+    `snow app teardown --force --cascade` before exiting,
+    regardless of any exceptions raised.
+ + Parameters for the returned context manager: + :param project_dir: Path to the project directory (optional) + :param env: Environment variables to replace os.environ (optional) + """ + + @contextmanager + def _nativeapp_teardown( + *, + project_dir: Path | None = None, + env: dict | None = None, + ): + try: + yield + finally: + args = ["--force", "--cascade"] + if project_dir: + args += ["--project", str(project_dir)] + kwargs: dict[str, Any] = {} + if env: + kwargs["env"] = env + result = runner.invoke_with_connection(["app", "teardown", *args], **kwargs) + assert result.exit_code == 0 + + return _nativeapp_teardown diff --git a/tests_integration/nativeapp/test_bundle.py b/tests_integration/nativeapp/test_bundle.py index 23e9e776bb..fc4f665450 100644 --- a/tests_integration/nativeapp/test_bundle.py +++ b/tests_integration/nativeapp/test_bundle.py @@ -24,10 +24,10 @@ @pytest.fixture(scope="function", params=["napp_init_v1", "napp_init_v2"]) -def template_setup(runner, project_directory, request): +def template_setup(runner, nativeapp_project_directory, request): test_project = request.param with enable_definition_v2_feature_flag: - with project_directory(test_project) as project_root: + with nativeapp_project_directory(test_project) as project_root: # Vanilla bundle on the unmodified template result = runner.invoke_json(["app", "bundle"]) assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_debug_mode.py b/tests_integration/nativeapp/test_debug_mode.py index e782ee07ea..6de8e21033 100644 --- a/tests_integration/nativeapp/test_debug_mode.py +++ b/tests_integration/nativeapp/test_debug_mode.py @@ -90,6 +90,7 @@ def test_nativeapp_controlled_debug_mode( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -110,7 +111,7 @@ def test_nativeapp_controlled_debug_mode( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: + with nativeapp_teardown(): # debug mode should be true by default on first app deploy, # because snowflake.yml doesn't set it explicitly either way ("uncontrolled") assert is_debug_mode(snowflake_session, app_name) @@ -135,12 +136,3 @@ def test_nativeapp_controlled_debug_mode( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 assert is_debug_mode(snowflake_session, app_name) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_deploy.py b/tests_integration/nativeapp/test_deploy.py index 4244512718..e028cff91e 100644 --- a/tests_integration/nativeapp/test_deploy.py +++ b/tests_integration/nativeapp/test_deploy.py @@ -49,7 +49,7 @@ def _sanitize_deploy_output(output): @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy( test_project, - project_directory, + nativeapp_project_directory, runner, snowflake_session, default_username, @@ -59,52 +59,44 @@ def test_nativeapp_deploy( print_paths_as_posix, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection(["app", "deploy"]) assert result.exit_code == 0 assert 
sanitize_deploy_output(result.output) == snapshot - try: - # package exist - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - app_name = f"{project_name}_{default_username}{resource_suffix}".upper() - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show application packages like '{package_name}'", - ) - ), - dict(name=package_name), - ) - - # manifest file exists - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) - - # app does not exist - assert not_contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show applications like '{app_name}'", - ) - ), - dict(name=app_name), - ) - - # re-deploying should be a no-op; make sure we don't issue any PUT commands - result = runner.invoke_with_connection_json(["app", "deploy", "--debug"]) - assert result.exit_code == 0 - assert "Successfully uploaded chunk 0 of file" not in result.output - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # package exist + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + app_name = f"{project_name}_{default_username}{resource_suffix}".upper() + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show application packages like '{package_name}'", + ) + ), + dict(name=package_name), + ) + + # manifest file exists + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) + + # app does not exist + assert not_contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show applications like '{app_name}'", + ) + ), + dict(name=app_name), + ) + + # re-deploying should be a no-op; make sure we don't issue any PUT commands + result = runner.invoke_with_connection_json(["app", "deploy", "--debug"]) + assert result.exit_code == 0 + assert "Successfully uploaded chunk 0 of file" not in result.output @pytest.mark.integration @@ -130,7 +122,7 @@ def test_nativeapp_deploy_prune( contains, not_contains, test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -139,40 +131,28 @@ def test_nativeapp_deploy_prune( sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection_json(["app", "deploy"]) assert result.exit_code == 0 - try: - # delete a file locally - os.remove(os.path.join("app", "README.md")) - - # deploy - result = runner.invoke_with_connection(command.split()) - assert result.exit_code == 0 - assert sanitize_deploy_output(result.output) == snapshot - - # verify the file does not exist on the stage - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - 
) - for name in contains: - assert contains_row_with(stage_files.json, {"name": name}) - for name in not_contains: - assert not_contains_row_with(stage_files.json, {"name": name}) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # delete a file locally + os.remove(os.path.join("app", "README.md")) + + # deploy + result = runner.invoke_with_connection(command.split()) + assert result.exit_code == 0 + assert sanitize_deploy_output(result.output) == snapshot + + # verify the file does not exist on the stage + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + for name in contains: + assert contains_row_with(stage_files.json, {"name": name}) + for name in not_contains: + assert not_contains_row_with(stage_files.json, {"name": name}) # Tests a simple flow of executing "snow app deploy [files]", verifying that only the specified files are synced to the stage @@ -181,7 +161,7 @@ def test_nativeapp_deploy_prune( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_files( test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -190,7 +170,7 @@ def test_nativeapp_deploy_files( sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): # sync only two specific files to stage result = runner.invoke_with_connection( [ @@ -204,29 +184,15 @@ def test_nativeapp_deploy_files( assert result.exit_code == 0 assert sanitize_deploy_output(result.output) == snapshot - try: - # manifest and script files exist, readme doesn't exist - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) - assert contains_row_with( - stage_files.json, {"name": "stage/setup_script.sql"} - ) - assert not_contains_row_with(stage_files.json, {"name": "stage/README.md"}) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # manifest and script files exist, readme doesn't exist + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) + assert contains_row_with(stage_files.json, {"name": "stage/setup_script.sql"}) + assert not_contains_row_with(stage_files.json, {"name": "stage/README.md"}) # Tests 
that files inside of a symlinked directory are deployed @@ -235,7 +201,7 @@ def test_nativeapp_deploy_files( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_nested_directories( test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -244,7 +210,7 @@ def test_nativeapp_deploy_nested_directories( sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): # create nested file under app/ touch("app/nested/dir/file.txt") @@ -254,26 +220,14 @@ def test_nativeapp_deploy_nested_directories( assert result.exit_code == 0 assert sanitize_deploy_output(result.output) == snapshot - try: - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with( - stage_files.json, {"name": "stage/nested/dir/file.txt"} - ) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with( + stage_files.json, {"name": "stage/nested/dir/file.txt"} + ) # Tests that deploying a directory recursively syncs all of its contents @@ -282,14 +236,14 @@ def test_nativeapp_deploy_nested_directories( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_directory( test_project, - project_directory, + nativeapp_project_directory, runner, default_username, resource_suffix, sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): touch("app/dir/file.txt") result = runner.invoke_with_connection( ["app", "deploy", "app/dir", "--no-recursive", "--no-validate"] @@ -303,24 +257,12 @@ def test_nativeapp_deploy_directory( ) assert result.exit_code == 0 - try: - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/dir/file.txt"}) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert 
contains_row_with(stage_files.json, {"name": "stage/dir/file.txt"}) # Tests that deploying a directory without specifying -r returns an error @@ -329,21 +271,15 @@ def test_nativeapp_deploy_directory( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_directory_no_recursive( test_project, - project_directory, + nativeapp_project_directory, runner, ): - with project_directory(test_project): - try: - touch("app/nested/dir/file.txt") - result = runner.invoke_with_connection_json( - ["app", "deploy", "app/nested", "--no-validate"] - ) - assert result.exit_code == 1, result.output - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + touch("app/nested/dir/file.txt") + result = runner.invoke_with_connection_json( + ["app", "deploy", "app/nested", "--no-validate"] + ) + assert result.exit_code == 1, result.output # Tests that specifying an unknown path to deploy results in an error @@ -352,21 +288,15 @@ def test_nativeapp_deploy_directory_no_recursive( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_unknown_path( test_project, - project_directory, + nativeapp_project_directory, runner, ): - with project_directory(test_project): - try: - result = runner.invoke_with_connection_json( - ["app", "deploy", "does_not_exist", "--no-validate"] - ) - assert result.exit_code == 1 - assert "The following path does not exist:" in result.output - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + result = runner.invoke_with_connection_json( + ["app", "deploy", "does_not_exist", "--no-validate"] + ) + assert result.exit_code == 1 + assert "The following path does not exist:" in result.output # Tests that specifying a path with no deploy artifact results in an error @@ -375,21 +305,15 @@ def test_nativeapp_deploy_unknown_path( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_path_with_no_mapping( test_project, - project_directory, + nativeapp_project_directory, runner, ): - with project_directory(test_project): - try: - result = runner.invoke_with_connection_json( - ["app", "deploy", "snowflake.yml", "--no-validate"] - ) - assert result.exit_code == 1 - assert "No artifact found for" in result.output - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + result = runner.invoke_with_connection_json( + ["app", "deploy", "snowflake.yml", "--no-validate"] + ) + assert result.exit_code == 1 + assert "No artifact found for" in result.output # Tests that specifying a path and pruning result in an error @@ -398,25 +322,19 @@ def test_nativeapp_deploy_path_with_no_mapping( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_rejects_pruning_when_path_is_specified( test_project, - project_directory, + nativeapp_project_directory, runner, ): - with project_directory(test_project): - try: - os.unlink("app/README.md") - result = 
runner.invoke_with_connection_json( - ["app", "deploy", "app/README.md", "--prune"] - ) - - assert_that_result_is_usage_error( - result, - "Parameters 'paths' and '--prune' are incompatible and cannot be used simultaneously.", - ) + with nativeapp_project_directory(test_project): + os.unlink("app/README.md") + result = runner.invoke_with_connection_json( + ["app", "deploy", "app/README.md", "--prune"] + ) - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + assert_that_result_is_usage_error( + result, + "Parameters 'paths' and '--prune' are incompatible and cannot be used simultaneously.", + ) # Tests that specifying a path with no direct mapping falls back to search for prefix matches @@ -427,7 +345,7 @@ def test_nativeapp_deploy_rejects_pruning_when_path_is_specified( ) def test_nativeapp_deploy_looks_for_prefix_matches( test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -437,83 +355,72 @@ def test_nativeapp_deploy_looks_for_prefix_matches( ): project_name = "myapp" - with project_directory(test_project): - try: - result = runner.invoke_with_connection(["app", "deploy", "-r", "app"]) - assert result.exit_code == 0 - assert sanitize_deploy_output(result.output) == snapshot - - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) - assert contains_row_with( - stage_files.json, {"name": "stage/setup_script.sql"} - ) - assert contains_row_with(stage_files.json, {"name": "stage/README.md"}) - assert not_contains_row_with( - stage_files.json, {"name": "stage/src/main.py"} - ) - assert not_contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} - ) - - result = runner.invoke_with_connection( - ["app", "deploy", "-r", "lib/parent/child/c"] - ) - assert result.exit_code == 0 - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} - ) - assert not_contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/a.py"} - ) - assert not_contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/b.py"} - ) - - result = runner.invoke_with_connection( - ["app", "deploy", "lib/parent/child/a.py"] - ) - assert result.exit_code == 0 - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/a.py"} - ) - assert not_contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/b.py"} - ) - - result = runner.invoke_with_connection(["app", "deploy", "lib", "-r"]) - assert result.exit_code == 0 - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/a.py"} - ) - 
assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/b.py"} - ) - - finally: - result = runner.invoke_with_connection(["app", "teardown", "--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + result = runner.invoke_with_connection(["app", "deploy", "-r", "app"]) + assert result.exit_code == 0 + assert sanitize_deploy_output(result.output) == snapshot + + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) + assert contains_row_with(stage_files.json, {"name": "stage/setup_script.sql"}) + assert contains_row_with(stage_files.json, {"name": "stage/README.md"}) + assert not_contains_row_with(stage_files.json, {"name": "stage/src/main.py"}) + assert not_contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} + ) + + result = runner.invoke_with_connection( + ["app", "deploy", "-r", "lib/parent/child/c"] + ) + assert result.exit_code == 0 + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} + ) + assert not_contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/a.py"} + ) + assert not_contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/b.py"} + ) + + result = runner.invoke_with_connection( + ["app", "deploy", "lib/parent/child/a.py"] + ) + assert result.exit_code == 0 + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/a.py"} + ) + assert not_contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/b.py"} + ) + + result = runner.invoke_with_connection(["app", "deploy", "lib", "-r"]) + assert result.exit_code == 0 + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/a.py"} + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/b.py"} + ) # Tests that snow app deploy -r . 
deploys all changes @@ -522,7 +429,7 @@ def test_nativeapp_deploy_looks_for_prefix_matches( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_dot( test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -531,25 +438,16 @@ def test_nativeapp_deploy_dot( sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): - try: - result = runner.invoke_with_connection(["app", "deploy", "-r", "."]) - assert result.exit_code == 0 - assert sanitize_deploy_output(result.output) == snapshot - - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) - assert contains_row_with( - stage_files.json, {"name": "stage/setup_script.sql"} - ) - assert contains_row_with(stage_files.json, {"name": "stage/README.md"}) - - finally: - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + result = runner.invoke_with_connection(["app", "deploy", "-r", "."]) + assert result.exit_code == 0 + assert sanitize_deploy_output(result.output) == snapshot + + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) + assert contains_row_with(stage_files.json, {"name": "stage/setup_script.sql"}) + assert contains_row_with(stage_files.json, {"name": "stage/README.md"}) diff --git a/tests_integration/nativeapp/test_events.py b/tests_integration/nativeapp/test_events.py index f921db43e4..849ddfde5e 100644 --- a/tests_integration/nativeapp/test_events.py +++ b/tests_integration/nativeapp/test_events.py @@ -39,9 +39,9 @@ ], ) def test_app_events_mutually_exclusive_options( - test_project, runner, project_directory, flag_names, command + test_project, runner, nativeapp_project_directory, flag_names, command ): - with project_directory(test_project): + with nativeapp_project_directory(test_project): # The integration test account doesn't have an event table set up # but this test is still useful to validate the negative case result = runner.invoke_with_connection(["app", "events", *command]) @@ -69,9 +69,9 @@ def test_app_events_mutually_exclusive_options( ], ) def test_app_events_paired_options( - test_project, runner, project_directory, flag_names, command + test_project, runner, nativeapp_project_directory, flag_names, command ): - with project_directory(test_project): + with nativeapp_project_directory(test_project): # The integration test account doesn't have an event table set up # but this test is still useful to validate the negative case result = runner.invoke_with_connection(["app", "events", *command]) @@ -84,8 +84,10 @@ def test_app_events_paired_options( @pytest.mark.integration @enable_definition_v2_feature_flag @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) -def test_app_events_reject_invalid_type(test_project, runner, project_directory): - with project_directory(test_project): +def 
test_app_events_reject_invalid_type( + test_project, runner, nativeapp_project_directory +): + with nativeapp_project_directory(test_project): # The integration test account doesn't have an event table set up # but this test is still useful to validate the negative case result = runner.invoke_with_connection(["app", "events", "--type", "foo"]) diff --git a/tests_integration/nativeapp/test_init_run.py b/tests_integration/nativeapp/test_init_run.py index 4812585df5..d9925b88ff 100644 --- a/tests_integration/nativeapp/test_init_run.py +++ b/tests_integration/nativeapp/test_init_run.py @@ -32,48 +32,36 @@ @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_init_run_without_modifications( test_project, - project_directory, + nativeapp_project_directory, runner, snowflake_session, default_username, resource_suffix, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - # app + package exist - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - app_name = f"{project_name}_{default_username}{resource_suffix}".upper() - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show application packages like '{package_name}'", - ) - ), - dict(name=package_name), - ) - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show applications like '{app_name}'", - ) - ), - dict(name=app_name), - ) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # app + package exist + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + app_name = f"{project_name}_{default_username}{resource_suffix}".upper() + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show application packages like '{package_name}'", + ) + ), + dict(name=package_name), + ) + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show applications like '{app_name}'", + ) + ), + dict(name=app_name), + ) # Tests a simple flow of an existing project, but executing snow app run and teardown, all with distribution=internal @@ -87,6 +75,7 @@ def test_nativeapp_run_existing( snowflake_session, project_definition_files: List[Path], default_username, + nativeapp_teardown, resource_suffix, ): project_name = "integration" @@ -95,7 +84,7 @@ def test_nativeapp_run_existing( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: + with nativeapp_teardown(): # app + package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -137,15 +126,6 @@ def test_nativeapp_run_existing( {"ECHO": test_string}, ) - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Tests a simple flow of initiating a 
project, executing snow app run and teardown, all with distribution=internal @pytest.mark.integration @@ -153,48 +133,36 @@ def test_nativeapp_run_existing( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_init_run_handles_spaces( test_project, - project_directory, + nativeapp_project_directory, runner, snowflake_session, default_username, resource_suffix, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - # app + package exist - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - app_name = f"{project_name}_{default_username}{resource_suffix}".upper() - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show application packages like '{package_name}'", - ) - ), - dict(name=package_name), - ) - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show applications like '{app_name}'", - ) - ), - dict(name=app_name), - ) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # app + package exist + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + app_name = f"{project_name}_{default_username}{resource_suffix}".upper() + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show application packages like '{package_name}'", + ) + ), + dict(name=package_name), + ) + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show applications like '{app_name}'", + ) + ), + dict(name=app_name), + ) # Tests a simple flow of an existing project, but executing snow app run and teardown, all with distribution=external @@ -210,6 +178,7 @@ def test_nativeapp_run_existing_w_external( snowflake_session, project_definition_files: List[Path], default_username, + nativeapp_teardown, resource_suffix, ): project_name = "integration_external" @@ -218,7 +187,7 @@ def test_nativeapp_run_existing_w_external( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: + with nativeapp_teardown(): # app + package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -273,64 +242,35 @@ def test_nativeapp_run_existing_w_external( {"ECHO": test_string}, ) - # make sure we always delete the app, --force required for external distribution - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - - expect = snowflake_session.execute_string( - f"show applications like '{app_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": app_name} - ) - - expect = snowflake_session.execute_string( - f"show application packages like '{package_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": package_name} - ) - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Verifies 
that running "app run" after "app deploy" upgrades the app @pytest.mark.integration @enable_definition_v2_feature_flag @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_run_after_deploy( - test_project, project_directory, runner, default_username, resource_suffix + test_project, nativeapp_project_directory, runner, default_username, resource_suffix ): project_name = "myapp" app_name = f"{project_name}_{default_username}{resource_suffix}" stage_fqn = f"{project_name}_pkg_{default_username}{resource_suffix}.app_src.stage" - with project_directory(test_project): - try: - # Run #1 - result = runner.invoke_with_connection_json(["app", "run"]) - assert result.exit_code == 0 - - # Make a change & deploy - with open("app/README.md", "a") as file: - file.write("### Test") - result = runner.invoke_with_connection_json(["app", "deploy"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + # Run #1 + result = runner.invoke_with_connection_json(["app", "run"]) + assert result.exit_code == 0 - # Run #2 - result = runner.invoke_with_connection_json(["app", "run", "--debug"]) - assert result.exit_code == 0 - assert ( - f"alter application {app_name} upgrade using @{stage_fqn}" - in result.output - ) + # Make a change & deploy + with open("app/README.md", "a") as file: + file.write("### Test") + result = runner.invoke_with_connection_json(["app", "deploy"]) + assert result.exit_code == 0 - finally: - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # Run #2 + result = runner.invoke_with_connection_json(["app", "run", "--debug"]) + assert result.exit_code == 0 + assert ( + f"alter application {app_name} upgrade using @{stage_fqn}" in result.output + ) # Tests initialization of a project from a repo with a single template @@ -390,6 +330,7 @@ def test_nativeapp_run_orphan( force_flag, default_username, resource_suffix, + nativeapp_teardown, ): project_name = "integration" project_dir = project_definition_files[0].parent @@ -397,7 +338,7 @@ def test_nativeapp_run_orphan( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: + with nativeapp_teardown(): # app + package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -479,21 +420,6 @@ def test_nativeapp_run_orphan( dict(name=app_name, source=package_name), ) - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # manually drop the application in case the test failed and it wasn't dropped - result = runner.invoke_with_connection( - ["sql", "-q", f"drop application if exists {app_name} cascade"] - ) - assert result.exit_code == 0, result.output - - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Verifies that we can always cross-upgrade between different # run configurations as long as we pass the --force flag to "app run" @@ -517,7 +443,7 @@ def test_nativeapp_run_orphan( ) def test_nativeapp_force_cross_upgrade( test_project, - project_directory, + nativeapp_project_directory, run_args_from, run_args_to, runner, @@ -528,36 +454,30 @@ def test_nativeapp_force_cross_upgrade( app_name = f"{project_name}_{default_username}{resource_suffix}" pkg_name = f"{project_name}_pkg_{default_username}{resource_suffix}" - with 
project_directory(test_project): - try: - # Create version - result = runner.invoke_with_connection(["app", "version", "create", "v1"]) - assert result.exit_code == 0 - - # Set default release directive - result = runner.invoke_with_connection( - [ - "sql", - "-q", - f"alter application package {pkg_name} set default release directive version = v1 patch = 0", - ] - ) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + # Create version + result = runner.invoke_with_connection(["app", "version", "create", "v1"]) + assert result.exit_code == 0 - # Initial run - result = runner.invoke_with_connection(["app", "run"] + run_args_from) - assert result.exit_code == 0 + # Set default release directive + result = runner.invoke_with_connection( + [ + "sql", + "-q", + f"alter application package {pkg_name} set default release directive version = v1 patch = 0", + ] + ) + assert result.exit_code == 0 - # (Cross-)upgrade - is_cross_upgrade = run_args_from != run_args_to - result = runner.invoke_with_connection( - ["app", "run"] + run_args_to + ["--force"] - ) - assert result.exit_code == 0 - if is_cross_upgrade: - assert f"Dropping application object {app_name}." in result.output + # Initial run + result = runner.invoke_with_connection(["app", "run"] + run_args_from) + assert result.exit_code == 0 - finally: - # Drop the package - result = runner.invoke_with_connection(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # (Cross-)upgrade + is_cross_upgrade = run_args_from != run_args_to + result = runner.invoke_with_connection( + ["app", "run"] + run_args_to + ["--force"] + ) + assert result.exit_code == 0 + if is_cross_upgrade: + assert f"Dropping application object {app_name}." in result.output diff --git a/tests_integration/nativeapp/test_large_upload.py b/tests_integration/nativeapp/test_large_upload.py index 53fad983f9..126c51e857 100644 --- a/tests_integration/nativeapp/test_large_upload.py +++ b/tests_integration/nativeapp/test_large_upload.py @@ -97,6 +97,5 @@ def test_large_upload_skips_reupload( # make sure our file has been deleted temp_file.unlink(missing_ok=True) - # teardown is idempotent, so we can execute it again with no ill effects result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_open.py b/tests_integration/nativeapp/test_open.py index c4e67fd683..91465847f0 100644 --- a/tests_integration/nativeapp/test_open.py +++ b/tests_integration/nativeapp/test_open.py @@ -26,30 +26,24 @@ def test_nativeapp_open( mock_typer_launch, runner, test_project, - project_directory, + nativeapp_project_directory, default_username, resource_suffix, ): project_name = "myapp" app_name = f"{project_name}_{default_username}{resource_suffix}" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - result = runner.invoke_with_connection_json(["app", "open"]) - assert result.exit_code == 0 - assert "Snowflake Native App opened in browser." in result.output - mock_call = mock_typer_launch.call_args_list[0].args[0] - assert re.match( - rf"https://app.snowflake.com/.*#/apps/application/{app_name}", - mock_call, - re.IGNORECASE, - ) + result = runner.invoke_with_connection_json(["app", "open"]) + assert result.exit_code == 0 + assert "Snowflake Native App opened in browser." 
in result.output - finally: - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force", "--cascade"] - ) - assert result.exit_code == 0 + mock_call = mock_typer_launch.call_args_list[0].args[0] + assert re.match( + rf"https://app.snowflake.com/.*#/apps/application/{app_name}", + mock_call, + re.IGNORECASE, + ) diff --git a/tests_integration/nativeapp/test_post_deploy.py b/tests_integration/nativeapp/test_post_deploy.py index b7aed5486a..fe3f9d4e57 100644 --- a/tests_integration/nativeapp/test_post_deploy.py +++ b/tests_integration/nativeapp/test_post_deploy.py @@ -75,7 +75,7 @@ def test_nativeapp_post_deploy( snowflake_session, default_username, resource_suffix, - project_directory, + nativeapp_project_directory, test_project, is_versioned, with_project_flag, @@ -85,7 +85,7 @@ def test_nativeapp_post_deploy( app_name = f"{project_name}_{default_username}{resource_suffix}" pkg_name = f"{project_name}_pkg_{default_username}{resource_suffix}" - with project_directory(test_project) as tmp_dir: + with nativeapp_project_directory(test_project) as tmp_dir: project_args = ["--project", f"{tmp_dir}"] if with_project_flag else [] version_run_args = ["--version", version] if is_versioned else [] diff --git a/tests_integration/nativeapp/test_project_templating.py b/tests_integration/nativeapp/test_project_templating.py index c0cf13d2ea..89c21fbdb0 100644 --- a/tests_integration/nativeapp/test_project_templating.py +++ b/tests_integration/nativeapp/test_project_templating.py @@ -34,6 +34,7 @@ def test_nativeapp_project_templating_use_env_from_os( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -49,7 +50,7 @@ def test_nativeapp_project_templating_use_env_from_os( ) assert result.exit_code == 0 - try: + with nativeapp_teardown(env=local_test_env): # app + package exist package_name = f"{project_name}_{test_ci_env}_pkg_{default_username}{resource_suffix}".upper() app_name = f"{project_name}_{test_ci_env}_{default_username}{resource_suffix}".upper() @@ -89,21 +90,6 @@ def test_nativeapp_project_templating_use_env_from_os( {"ECHO": test_string}, ) - # make sure we always delete the app - result = runner.invoke_with_connection_json( - ["app", "teardown"], - env=local_test_env, - ) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"], - env=local_test_env, - ) - assert result.exit_code == 0 - # Tests a simple flow of native app with template reading env variables from OS through an intermediate var @pytest.mark.integration @@ -118,6 +104,7 @@ def test_nativeapp_project_templating_use_env_from_os_through_intermediate_var( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -133,7 +120,7 @@ def test_nativeapp_project_templating_use_env_from_os_through_intermediate_var( ) assert result.exit_code == 0 - try: + with nativeapp_teardown(env=local_test_env): # app + package exist package_name = f"{project_name}_{test_ci_env}_pkg_{default_username}{resource_suffix}".upper() app_name = f"{project_name}_{test_ci_env}_{default_username}{resource_suffix}".upper() @@ -173,21 +160,6 @@ def test_nativeapp_project_templating_use_env_from_os_through_intermediate_var( {"ECHO": test_string}, ) - # make sure we always delete the app - result = 
runner.invoke_with_connection_json( - ["app", "teardown"], - env=local_test_env, - ) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"], - env=local_test_env, - ) - assert result.exit_code == 0 - # Tests a simple flow of native app with template reading default env values from project definition file @pytest.mark.integration @@ -202,6 +174,7 @@ def test_nativeapp_project_templating_use_default_env_from_project( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -217,7 +190,7 @@ def test_nativeapp_project_templating_use_default_env_from_project( ) assert result.exit_code == 0 - try: + with nativeapp_teardown(env=local_test_env): # app + package exist package_name = f"{project_name}_{default_ci_env}_pkg_{default_username}{resource_suffix}".upper() app_name = f"{project_name}_{default_ci_env}_{default_username}{resource_suffix}".upper() @@ -257,21 +230,6 @@ def test_nativeapp_project_templating_use_default_env_from_project( {"ECHO": test_string}, ) - # make sure we always delete the app - result = runner.invoke_with_connection_json( - ["app", "teardown"], - env=local_test_env, - ) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"], - env=local_test_env, - ) - assert result.exit_code == 0 - # Tests a native app with --env parameter through command line overwriting values from os env and project definition filetemplate reading env var @pytest.mark.integration @@ -286,6 +244,7 @@ def test_nativeapp_project_templating_use_env_from_cli_as_highest_priority( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -303,7 +262,7 @@ def test_nativeapp_project_templating_use_env_from_cli_as_highest_priority( ) assert result.exit_code == 0 - try: + with nativeapp_teardown(env=local_test_env): # app + package exist package_name = f"{project_name}_{expected_value}_pkg_{default_username}{resource_suffix}".upper() app_name = f"{project_name}_{expected_value}_{default_username}{resource_suffix}".upper() @@ -350,14 +309,6 @@ def test_nativeapp_project_templating_use_env_from_cli_as_highest_priority( ) assert result.exit_code == 0 - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--env", f"CI_ENV={expected_value}", "--force"], - env=local_test_env, - ) - assert result.exit_code == 0 - # Tests that other native app commands still succeed with templating @pytest.mark.integration @@ -369,6 +320,7 @@ def test_nativeapp_project_templating_use_env_from_cli_as_highest_priority( ) def test_nativeapp_project_templating_bundle_deploy_successful( runner, + nativeapp_teardown, project_definition_files: List[Path], ): project_dir = project_definition_files[0].parent @@ -377,7 +329,7 @@ def test_nativeapp_project_templating_bundle_deploy_successful( local_test_env = {"CI_ENV": test_ci_env, "APP_DIR": "app"} with pushd(project_dir): - try: + with nativeapp_teardown(env=local_test_env): result = runner.invoke_json( ["app", "bundle"], env=local_test_env, @@ -389,10 +341,3 @@ def test_nativeapp_project_templating_bundle_deploy_successful( 
env=local_test_env, ) assert result.exit_code == 0 - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"], - env=local_test_env, - ) - assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_teardown.py b/tests_integration/nativeapp/test_teardown.py index d3e945008e..8da5e650ae 100644 --- a/tests_integration/nativeapp/test_teardown.py +++ b/tests_integration/nativeapp/test_teardown.py @@ -45,7 +45,7 @@ def test_nativeapp_teardown_cascade( expected_error, orphan_app, test_project, - project_directory, + nativeapp_project_directory, runner, snowflake_session, default_username, @@ -55,7 +55,7 @@ def test_nativeapp_teardown_cascade( app_name = f"{project_name}_{default_username}{resource_suffix}".upper() db_name = f"{project_name}_db_{default_username}{resource_suffix}".upper() - with project_directory(test_project): + with nativeapp_project_directory(test_project): # Replacing the static DB name with a unique one to avoid collisions between tests with open("app/setup_script.sql", "r") as file: setup_script_content = file.read() @@ -67,78 +67,68 @@ def test_nativeapp_teardown_cascade( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - # Grant permission to create databases - snowflake_session.execute_string( - f"grant create database on account to application {app_name}", - ) - - # Create the database - snowflake_session.execute_string("use warehouse xsmall") - snowflake_session.execute_string( - f"call {app_name}.core.create_db()", - ) - - # Verify the database is owned by the app - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string(f"show databases like '{db_name}'") - ), - dict(name=db_name, owner=app_name), - ) - - if orphan_app: - # orphan the app by dropping the application package, - # this causes future `show objects owned by application` queries to fail - # and `snow app teardown` needs to be resilient against this - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - snowflake_session.execute_string( - f"drop application package {package_name}" - ) - assert not_contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show application packages like '{package_name}'", - ) - ), - dict(name=package_name), - ) + # Grant permission to create databases + snowflake_session.execute_string( + f"grant create database on account to application {app_name}", + ) - # Run the teardown command - result = runner.invoke_with_connection_json(command.split()) - if expected_error is not None: - assert result.exit_code == 1 - assert expected_error in result.output - return + # Create the database + snowflake_session.execute_string("use warehouse xsmall") + snowflake_session.execute_string( + f"call {app_name}.core.create_db()", + ) - assert result.exit_code == 0 + # Verify the database is owned by the app + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string(f"show databases like '{db_name}'") + ), + dict(name=db_name, owner=app_name), + ) - # Verify the database is dropped - assert not_contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string(f"show databases like '{db_name}'") - ), - dict(name=db_name, owner=app_name), + if orphan_app: + # orphan the app by dropping the application package, + # this causes future `show objects owned by application` queries 
to fail + # and `snow app teardown` needs to be resilient against this + package_name = ( + f"{project_name}_pkg_{default_username}{resource_suffix}".upper() ) - - # Verify the app is dropped + snowflake_session.execute_string(f"drop application package {package_name}") assert not_contains_row_with( row_from_snowflake_session( snowflake_session.execute_string( - f"show applications like '{app_name}'", + f"show application packages like '{package_name}'", ) ), - dict(name=app_name), + dict(name=package_name), ) - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force", "--cascade"] - ) - assert result.exit_code == 0 + # Run the teardown command + result = runner.invoke_with_connection_json(command.split()) + if expected_error is not None: + assert result.exit_code == 1 + assert expected_error in result.output + return + + assert result.exit_code == 0 + + # Verify the database is dropped + assert not_contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string(f"show databases like '{db_name}'") + ), + dict(name=db_name, owner=app_name), + ) + + # Verify the app is dropped + assert not_contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show applications like '{app_name}'", + ) + ), + dict(name=app_name), + ) @pytest.mark.integration @@ -151,32 +141,25 @@ def test_nativeapp_teardown_unowned_app( resource_suffix, force, test_project, - project_directory, + nativeapp_project_directory, ): project_name = "myapp" app_name = f"{project_name}_{default_username}{resource_suffix}" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - result = runner.invoke_with_connection_json( - ["sql", "-q", f"alter application {app_name} set comment = 'foo'"] - ) - assert result.exit_code == 0 - - if force: - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"] - ) - assert result.exit_code == 0 - else: - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 1 + result = runner.invoke_with_connection_json( + ["sql", "-q", f"alter application {app_name} set comment = 'foo'"] + ) + assert result.exit_code == 0 - finally: + if force: result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) assert result.exit_code == 0 + else: + result = runner.invoke_with_connection_json(["app", "teardown"]) + assert result.exit_code == 1 @pytest.mark.integration @@ -189,47 +172,42 @@ def test_nativeapp_teardown_pkg_versions( resource_suffix, default_release_directive, test_project, - project_directory, + nativeapp_project_directory, ): project_name = "myapp" pkg_name = f"{project_name}_pkg_{default_username}{resource_suffix}" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection(["app", "version", "create", "v1"]) assert result.exit_code == 0 - try: - # when setting a release directive, we will not have the ability to drop the version later - if default_release_directive: - result = runner.invoke_with_connection( - [ - "sql", - "-q", - f"alter application package {pkg_name} set default release directive version = v1 patch = 0", - ] - ) - assert result.exit_code == 0 - - # try to teardown; fail because we have a version - result = runner.invoke_with_connection(["app", "teardown"]) 
- assert result.exit_code == 1 - assert f"Drop versions first, or use --force to override." in result.output + # when setting a release directive, we will not have the ability to drop the version later + if default_release_directive: + result = runner.invoke_with_connection( + [ + "sql", + "-q", + f"alter application package {pkg_name} set default release directive version = v1 patch = 0", + ] + ) + assert result.exit_code == 0 - teardown_args = [] - if not default_release_directive: - # if we didn't set a release directive, we can drop the version and try again - result = runner.invoke_with_connection( - ["app", "version", "drop", "v1", "--force"] - ) - assert result.exit_code == 0 - else: - # if we did set a release directive, we need --force for teardown to work - teardown_args = ["--force"] + # try to teardown; fail because we have a version + result = runner.invoke_with_connection(["app", "teardown"]) + assert result.exit_code == 1 + assert f"Drop versions first, or use --force to override." in result.output - # either way, we can now tear down the application package - result = runner.invoke_with_connection(["app", "teardown"] + teardown_args) + teardown_args = [] + if not default_release_directive: + # if we didn't set a release directive, we can drop the version and try again + result = runner.invoke_with_connection( + ["app", "version", "drop", "v1", "--force"] + ) assert result.exit_code == 0 + else: + # if we did set a release directive, we need --force for teardown to work + teardown_args = ["--force"] - finally: - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # either way, we can now tear down the application package + result = runner.invoke_with_connection(["app", "teardown"] + teardown_args) + assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_validate.py b/tests_integration/nativeapp/test_validate.py index 21f0ca8dfe..50084469c6 100644 --- a/tests_integration/nativeapp/test_validate.py +++ b/tests_integration/nativeapp/test_validate.py @@ -21,35 +21,25 @@ @pytest.mark.integration @enable_definition_v2_feature_flag @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) -def test_nativeapp_validate(test_project, project_directory, runner): - with project_directory(test_project): - try: - # validate the app's setup script - result = runner.invoke_with_connection(["app", "validate"]) - assert result.exit_code == 0, result.output - assert "Native App validation succeeded." in result.output - finally: - result = runner.invoke_with_connection(["app", "teardown", "--force"]) - assert result.exit_code == 0, result.output +def test_nativeapp_validate(test_project, nativeapp_project_directory, runner): + with nativeapp_project_directory(test_project): + # validate the app's setup script + result = runner.invoke_with_connection(["app", "validate"]) + assert result.exit_code == 0, result.output + assert "Native App validation succeeded." 
in result.output @pytest.mark.integration @enable_definition_v2_feature_flag @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) -def test_nativeapp_validate_failing(test_project, project_directory, runner): - with project_directory(test_project): +def test_nativeapp_validate_failing(test_project, nativeapp_project_directory, runner): + with nativeapp_project_directory(test_project): # Create invalid SQL file Path("app/setup_script.sql").write_text("Lorem ipsum dolor sit amet") - try: - # validate the app's setup script, this will fail - # because we include an empty file - result = runner.invoke_with_connection(["app", "validate"]) - assert result.exit_code == 1, result.output - assert ( - "Snowflake Native App setup script failed validation." in result.output - ) - assert "syntax error" in result.output - finally: - result = runner.invoke_with_connection(["app", "teardown", "--force"]) - assert result.exit_code == 0, result.output + # validate the app's setup script, this will fail + # because we include an empty file + result = runner.invoke_with_connection(["app", "validate"]) + assert result.exit_code == 1, result.output + assert "Snowflake Native App setup script failed validation." in result.output + assert "syntax error" in result.output diff --git a/tests_integration/nativeapp/test_version.py b/tests_integration/nativeapp/test_version.py index bb35ffb84b..18859a705e 100644 --- a/tests_integration/nativeapp/test_version.py +++ b/tests_integration/nativeapp/test_version.py @@ -35,6 +35,7 @@ def test_nativeapp_version_create_and_drop( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -45,7 +46,7 @@ def test_nativeapp_version_create_and_drop( ) assert result_create.exit_code == 0 - try: + with nativeapp_teardown(): # package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -73,22 +74,6 @@ def test_nativeapp_version_create_and_drop( actual = runner.invoke_with_connection_json(["app", "version", "list"]) assert len(actual.json) == 0 - # make sure we always delete the package - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - expect = snowflake_session.execute_string( - f"show application packages like '{package_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": package_name} - ) - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Tests upgrading an app from an existing loose files installation to versioned installation. 
@pytest.mark.integration @@ -101,6 +86,7 @@ def test_nativeapp_upgrade( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -111,7 +97,7 @@ def test_nativeapp_upgrade( ["app", "version", "create", "v1", "--force", "--skip-git-check"] ) - try: + with nativeapp_teardown(): # package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -139,15 +125,6 @@ def test_nativeapp_upgrade( ["app", "version", "drop", "v1", "--force"] ) - # make sure we always delete the package - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Make sure we can create 3+ patches on the same version @pytest.mark.integration @@ -158,12 +135,13 @@ def test_nativeapp_version_create_3_patches( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" project_dir = project_definition_files[0].parent with pushd(project_dir): - try: + with nativeapp_teardown(): package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() ) @@ -195,22 +173,6 @@ def test_nativeapp_version_create_3_patches( actual = runner.invoke_with_connection_json(["app", "version", "list"]) assert len(actual.json) == 0 - # make sure we always delete the package - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - expect = snowflake_session.execute_string( - f"show application packages like '{package_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": package_name} - ) - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - @pytest.mark.integration @enable_definition_v2_feature_flag @@ -222,12 +184,13 @@ def test_nativeapp_version_create_patch_is_integer( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" project_dir = project_definition_files[0].parent with pushd(project_dir): - try: + with nativeapp_teardown(): package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() ) @@ -282,21 +245,6 @@ def test_nativeapp_version_create_patch_is_integer( actual = runner.invoke_with_connection_json(["app", "version", "list"]) assert len(actual.json) == 0 - # make sure we always delete the package - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - expect = snowflake_session.execute_string( - f"show application packages like '{package_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": package_name} - ) - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Tests creating a version for a package that was not created by the CLI # (doesn't have the magic CLI comment) @@ -310,6 +258,7 @@ def test_nativeapp_version_create_package_no_magic_comment( snowflake_session, default_username, resource_suffix, + 
nativeapp_teardown, snapshot, project_definition_files: List[Path], ): @@ -319,7 +268,7 @@ def test_nativeapp_version_create_package_no_magic_comment( result_create_abort = runner.invoke_with_connection_json(["app", "deploy"]) assert result_create_abort.exit_code == 0 - try: + with nativeapp_teardown(): # package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -384,7 +333,3 @@ def test_nativeapp_version_create_package_no_magic_comment( # Remove date field row.pop("created_on", None) assert actual.json == snapshot - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0