From 73fa2192fafa1d9c87cf3131cb5ca1078466fa91 Mon Sep 17 00:00:00 2001 From: Jan Sikorski Date: Sun, 25 Aug 2024 08:17:12 +0200 Subject: [PATCH] Basic solution SNOW-1636849 Auto-teardown Native App in integration tests (#1478) Changes `with project_directory()` to `with nativeapp_project_directory()`, which automatically runs `snow app teardown` before exiting the project. This allows us to remove the `try`/`finally` in most tests. For tests that were using `with pushd(test_project)`, this has been changed to `with nativeapp_teardown()`, which is what `with nativeapp_project_directory()` uses under the hood. SNOW-1621834 Cast version to identifier when creating/dropping app versions (#1475) When running `snow app version create` and `snow app version drop`, wrap the version in `to_identifier()` so users don't have to specify the quotes around version names that aren't valid identifiers. If the name is already quoted, `to_identifier()` doesn't do anything. Added tests Added tests --- RELEASE-NOTES.md | 1 + .../nativeapp/version/version_processor.py | 16 +- .../api/project/schemas/entities/common.py | 4 + .../api/project/schemas/project_definition.py | 20 + .../test_version_create_processor.py | 37 +- .../nativeapp/test_version_drop_processor.py | 83 ++- tests/project/test_project_definition_v2.py | 26 + .../projects/mixins_basic/snowflake.yml | 28 + .../mixins_defaults_hierarchy/snowflake.yml | 29 + .../mixins_different_entities/environment.yml | 5 + .../mixins_different_entities/snowflake.yml | 45 ++ .../streamlit_app.py | 0 tests_integration/nativeapp/conftest.py | 63 +++ tests_integration/nativeapp/test_bundle.py | 4 +- .../nativeapp/test_debug_mode.py | 12 +- tests_integration/nativeapp/test_deploy.py | 494 +++++++----------- tests_integration/nativeapp/test_events.py | 14 +- tests_integration/nativeapp/test_init_run.py | 258 ++++----- .../nativeapp/test_large_upload.py | 1 - tests_integration/nativeapp/test_open.py | 28 +- .../nativeapp/test_post_deploy.py | 4 
+- .../nativeapp/test_project_templating.py | 75 +-- tests_integration/nativeapp/test_teardown.py | 204 ++++---- tests_integration/nativeapp/test_validate.py | 38 +- tests_integration/nativeapp/test_version.py | 75 +-- 25 files changed, 766 insertions(+), 798 deletions(-) create mode 100644 tests/test_data/projects/mixins_basic/snowflake.yml create mode 100644 tests/test_data/projects/mixins_defaults_hierarchy/snowflake.yml create mode 100644 tests/test_data/projects/mixins_different_entities/environment.yml create mode 100644 tests/test_data/projects/mixins_different_entities/snowflake.yml create mode 100644 tests/test_data/projects/mixins_different_entities/streamlit_app.py create mode 100644 tests_integration/nativeapp/conftest.py diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 7e90b8c362..bf1b83c2c8 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -42,6 +42,7 @@ * Fix the typo in spcs service name argument description. It is the identifier of the **service** instead of the **service pool**. * Fix error handling and improve messaging when no artifacts provided. * Improved error message for incompatible parameters. 
+* Fixed SQL error when running `snow app version create` and `snow app version drop` with a version name that isn't a valid Snowflake unquoted identifier # v2.8.0 diff --git a/src/snowflake/cli/_plugins/nativeapp/version/version_processor.py b/src/snowflake/cli/_plugins/nativeapp/version/version_processor.py index 274c82357d..159d2b6303 100644 --- a/src/snowflake/cli/_plugins/nativeapp/version/version_processor.py +++ b/src/snowflake/cli/_plugins/nativeapp/version/version_processor.py @@ -39,10 +39,9 @@ from snowflake.cli.api.console import cli_console as cc from snowflake.cli.api.exceptions import SnowflakeSQLExecutionError from snowflake.cli.api.project.schemas.native_app.native_app import NativeApp -from snowflake.cli.api.project.util import unquote_identifier +from snowflake.cli.api.project.util import to_identifier, unquote_identifier from snowflake.cli.api.utils.cursor import ( find_all_rows, - find_first_row, ) from snowflake.connector import ProgrammingError from snowflake.connector.cursor import DictCursor @@ -121,6 +120,8 @@ def add_new_version(self, version: str) -> None: """ Defines a new version in an existing application package. """ + # Make the version a valid identifier, adding quotes if necessary + version = to_identifier(version) with self.use_role(self.package_role): cc.step( f"Defining a new version {version} in application package {self.package_name}" @@ -141,6 +142,8 @@ def add_new_patch_to_version(self, version: str, patch: Optional[int] = None): """ Add a new patch, optionally a custom one, to an existing version in an application package. 
""" + # Make the version a valid identifier, adding quotes if necessary + version = to_identifier(version) with self.use_role(self.package_role): cc.step( f"Adding new patch to version {version} defined in application package {self.package_name}" @@ -156,11 +159,7 @@ def add_new_patch_to_version(self, version: str, patch: Optional[int] = None): add_version_query, cursor_class=DictCursor ) - show_row = find_first_row( - result_cursor, - lambda row: row[VERSION_COL] == unquote_identifier(version), - ) - + show_row = result_cursor.fetchall()[0] new_patch = show_row["patch"] cc.message( f"Patch {new_patch} created for version {version} defined in application package {self.package_name}." @@ -328,6 +327,9 @@ def process( "Manifest.yml file does not contain a value for the version field." ) + # Make the version a valid identifier, adding quotes if necessary + version = to_identifier(version) + cc.step( dedent( f"""\ diff --git a/src/snowflake/cli/api/project/schemas/entities/common.py b/src/snowflake/cli/api/project/schemas/entities/common.py index 46922588c1..ff7e01c78a 100644 --- a/src/snowflake/cli/api/project/schemas/entities/common.py +++ b/src/snowflake/cli/api/project/schemas/entities/common.py @@ -41,6 +41,10 @@ class MetaField(UpdatableModel): title="Actions that will be executed after the application object is created/upgraded", default=None, ) + use_mixin: Optional[str] = Field( + title="Name of the mixin used to fill the entity fields", + default=None, + ) class DefaultsField(UpdatableModel): diff --git a/src/snowflake/cli/api/project/schemas/project_definition.py b/src/snowflake/cli/api/project/schemas/project_definition.py index ce74e38739..2c0535c8e0 100644 --- a/src/snowflake/cli/api/project/schemas/project_definition.py +++ b/src/snowflake/cli/api/project/schemas/project_definition.py @@ -194,6 +194,26 @@ def _validate_target_field( default=None, ) + mixins: Optional[Dict[str, Dict]] = Field( + title="Mixins to apply to entities", + default=None, + ) + + 
@model_validator(mode="before") + @classmethod + def apply_mixins(cls, data: Dict) -> Dict: + """ + Applies mixins to those entities, whose meta field contains the mixin name. + """ + if "mixins" in data and "entities" in data: + for name, mixin in data.get("mixins", {}).items(): + for entity in data["entities"].values(): + if entity.get("meta", {}).get("use_mixin") == name: + for key in mixin.keys(): + if key in entity.keys(): + entity[key] = mixin[key] + return data + def get_entities_by_type(self, entity_type: str): return {i: e for i, e in self.entities.items() if e.get_type() == entity_type} diff --git a/tests/nativeapp/test_version_create_processor.py b/tests/nativeapp/test_version_create_processor.py index 79fb506b10..b418addc5d 100644 --- a/tests/nativeapp/test_version_create_processor.py +++ b/tests/nativeapp/test_version_create_processor.py @@ -105,8 +105,11 @@ def test_get_existing_release_direction_info(mock_execute, temp_dir, mock_cursor # Test add_new_version adds a new version to an app pkg correctly @mock.patch(NATIVEAPP_MANAGER_EXECUTE) -def test_add_version(mock_execute, temp_dir, mock_cursor): - version = "V1" +@pytest.mark.parametrize( + ["version", "version_identifier"], + [("V1", "V1"), ("1.0.0", '"1.0.0"'), ('"1.0.0"', '"1.0.0"')], +) +def test_add_version(mock_execute, temp_dir, mock_cursor, version, version_identifier): side_effects, expected = mock_execute_helper( [ ( @@ -120,7 +123,7 @@ def test_add_version(mock_execute, temp_dir, mock_cursor): dedent( f"""\ alter application package app_pkg - add version V1 + add version {version_identifier} using @app_pkg.app_src.stage """ ), @@ -146,8 +149,13 @@ def test_add_version(mock_execute, temp_dir, mock_cursor): # Test add_new_patch_to_version adds an "auto-increment" patch to an existing version @mock.patch(NATIVEAPP_MANAGER_EXECUTE) -def test_add_new_patch_auto(mock_execute, temp_dir, mock_cursor): - version = "V1" +@pytest.mark.parametrize( + ["version", "version_identifier"], + [("V1", 
"V1"), ("1.0.0", '"1.0.0"'), ('"1.0.0"', '"1.0.0"')], +) +def test_add_new_patch_auto( + mock_execute, temp_dir, mock_cursor, version, version_identifier +): side_effects, expected = mock_execute_helper( [ ( @@ -161,7 +169,7 @@ def test_add_new_patch_auto(mock_execute, temp_dir, mock_cursor): dedent( f"""\ alter application package app_pkg - add patch for version V1 + add patch for version {version_identifier} using @app_pkg.app_src.stage """ ), @@ -187,8 +195,13 @@ def test_add_new_patch_auto(mock_execute, temp_dir, mock_cursor): # Test add_new_patch_to_version adds a custom patch to an existing version @mock.patch(NATIVEAPP_MANAGER_EXECUTE) -def test_add_new_patch_custom(mock_execute, temp_dir, mock_cursor): - version = "V1" +@pytest.mark.parametrize( + ["version", "version_identifier"], + [("V1", "V1"), ("1.0.0", '"1.0.0"'), ('"1.0.0"', '"1.0.0"')], +) +def test_add_new_patch_custom( + mock_execute, temp_dir, mock_cursor, version, version_identifier +): side_effects, expected = mock_execute_helper( [ ( @@ -202,7 +215,7 @@ def test_add_new_patch_custom(mock_execute, temp_dir, mock_cursor): dedent( f"""\ alter application package app_pkg - add patch 12 for version V1 + add patch 12 for version {version_identifier} using @app_pkg.app_src.stage """ ), @@ -222,7 +235,7 @@ def test_add_new_patch_custom(mock_execute, temp_dir, mock_cursor): ) processor = _get_version_create_processor() - processor.add_new_patch_to_version(version, "12") + processor.add_new_patch_to_version(version, 12) assert mock_execute.mock_calls == expected @@ -282,7 +295,7 @@ def test_process_no_version_exists_throws_bad_option_exception_one( processor.process( bundle_map=mock_bundle_map, version="v1", - patch="12", + patch=12, policy=policy_param, git_policy=policy_param, is_interactive=False, @@ -315,7 +328,7 @@ def test_process_no_version_exists_throws_bad_option_exception_two( processor.process( bundle_map=mock_bundle_map, version="v1", - patch="12", + patch=12, policy=policy_param, 
git_policy=policy_param, is_interactive=False, diff --git a/tests/nativeapp/test_version_drop_processor.py b/tests/nativeapp/test_version_drop_processor.py index 61e7cb6161..9136f25d21 100644 --- a/tests/nativeapp/test_version_drop_processor.py +++ b/tests/nativeapp/test_version_drop_processor.py @@ -96,13 +96,13 @@ def test_process_has_no_existing_app_pkg(mock_get_existing, policy_param, temp_d def test_process_no_version_from_user_no_version_in_manifest( mock_version_info_in_manifest, mock_build_bundle, - mock_mismatch, + mock_distribution, mock_get_existing, policy_param, temp_dir, ): - mock_mismatch.return_Value = "internal" + mock_distribution.return_value = "internal" current_working_directory = os.getcwd() create_named_file( file_name="snowflake.yml", @@ -144,7 +144,7 @@ def test_process_drop_cannot_complete( mock_typer_confirm, mock_version_info_in_manifest, mock_build_bundle, - mock_mismatch, + mock_distribution, mock_get_existing, policy_param, is_interactive_param, @@ -152,7 +152,7 @@ def test_process_drop_cannot_complete( temp_dir, ): - mock_mismatch.return_value = "internal" + mock_distribution.return_value = "internal" current_working_directory = os.getcwd() create_named_file( file_name="snowflake.yml", @@ -190,12 +190,12 @@ def test_process_drop_cannot_complete( (ask_always_policy, True), ], ) -def test_process_drop_success( +def test_process_drop_from_manifest( mock_typer_confirm, mock_execute, mock_version_info_in_manifest, mock_build_bundle, - mock_mismatch, + mock_distribution, mock_get_existing, policy_param, is_interactive_param, @@ -203,7 +203,7 @@ def test_process_drop_success( mock_cursor, ): - mock_mismatch.return_value = "internal" + mock_distribution.return_value = "internal" side_effects, expected = mock_execute_helper( [ ( @@ -234,3 +234,72 @@ def test_process_drop_success( version=None, policy=policy_param, is_interactive=is_interactive_param ) assert mock_execute.mock_calls == expected + + +@mock.patch( + 
f"{VERSION_MODULE}.{DROP_PROCESSOR}.get_existing_app_pkg_info", + return_value={"owner": "package_role"}, +) +@mock_get_app_pkg_distribution_in_sf() +@mock.patch(f"{VERSION_MODULE}.{DROP_PROCESSOR}.build_bundle", return_value=None) +@mock.patch(NATIVEAPP_MANAGER_EXECUTE) +@mock.patch( + f"snowflake.cli._plugins.nativeapp.policy.{TYPER_CONFIRM}", return_value=True +) +@pytest.mark.parametrize( + "policy_param, is_interactive_param", + [ + (allow_always_policy, False), + (ask_always_policy, True), + (ask_always_policy, True), + ], +) +@pytest.mark.parametrize( + ["version", "version_identifier"], + [("V1", "V1"), ("1.0.0", '"1.0.0"'), ('"1.0.0"', '"1.0.0"')], +) +def test_process_drop_specific_version( + mock_typer_confirm, + mock_execute, + mock_build_bundle, + mock_distribution, + mock_get_existing, + policy_param, + is_interactive_param, + temp_dir, + mock_cursor, + version, + version_identifier, +): + + mock_distribution.return_value = "internal" + side_effects, expected = mock_execute_helper( + [ + ( + mock_cursor([{"CURRENT_ROLE()": "old_role"}], []), + mock.call("select current_role()", cursor_class=DictCursor), + ), + (None, mock.call("use role package_role")), + ( + None, + mock.call( + f"alter application package app_pkg drop version {version_identifier}" + ), + ), + (None, mock.call("use role old_role")), + ] + ) + mock_execute.side_effect = side_effects + + current_working_directory = os.getcwd() + create_named_file( + file_name="snowflake.yml", + dir_name=current_working_directory, + contents=[mock_snowflake_yml_file], + ) + + processor = _get_version_drop_processor() + processor.process( + version=version, policy=policy_param, is_interactive=is_interactive_param + ) + assert mock_execute.mock_calls == expected diff --git a/tests/project/test_project_definition_v2.py b/tests/project/test_project_definition_v2.py index 16ded60fa4..805a226388 100644 --- a/tests/project/test_project_definition_v2.py +++ b/tests/project/test_project_definition_v2.py @@ 
-385,6 +385,32 @@ def test_v1_to_v2_conversion( _assert_entities_are_equal(v1_function, v2_function) +@pytest.mark.parametrize( + "project_name,stage1,stage2", + [("mixins_basic", "foo", "bar"), ("mixins_defaults_hierarchy", "foo", "baz")], +) +def test_mixins(project_directory, project_name, stage1, stage2): + with project_directory(project_name) as project_dir: + definition = DefinitionManager(project_dir).project_definition + + assert definition.entities["function1"].stage == stage1 + assert definition.entities["function2"].stage == stage2 + + +def test_mixins_for_different_entities(project_directory): + with project_directory("mixins_different_entities") as project_dir: + definition = DefinitionManager(project_dir).project_definition + + assert definition.entities["function1"].stage == "foo" + assert definition.entities["streamlit1"].main_file == "streamlit_app.py" + + +# TODO: add tests for the following: +# - entities of different types - only entity-specific fields should be updated +# - entities with different mixins +# - which prevails - defaults or mixins? 
Check order + + def _assert_entities_are_equal( v1_entity: _CallableBase, v2_entity: SnowparkEntityModel ): diff --git a/tests/test_data/projects/mixins_basic/snowflake.yml b/tests/test_data/projects/mixins_basic/snowflake.yml new file mode 100644 index 0000000000..c175398196 --- /dev/null +++ b/tests/test_data/projects/mixins_basic/snowflake.yml @@ -0,0 +1,28 @@ +definition_version: '2' +entities: + function1: + artifacts: + - src + handler: app.hello + identifier: name + meta: + use_mixin: my_mixin + returns: string + signature: + - name: name + type: string + type: function + function2: + artifacts: + - src + handler: app.hello + identifier: name + returns: string + signature: + - name: name + type: string + stage: bar + type: function +mixins: + my_mixin: + stage: foo diff --git a/tests/test_data/projects/mixins_defaults_hierarchy/snowflake.yml b/tests/test_data/projects/mixins_defaults_hierarchy/snowflake.yml new file mode 100644 index 0000000000..af1d3ed374 --- /dev/null +++ b/tests/test_data/projects/mixins_defaults_hierarchy/snowflake.yml @@ -0,0 +1,29 @@ +definition_version: '2' +entities: + function1: + artifacts: + - src + handler: app.hello + identifier: name + meta: + use_mixin: my_mixin + returns: string + signature: + - name: name + type: string + type: function + function2: + artifacts: + - src + handler: app.hello + identifier: name + returns: string + signature: + - name: name + type: string + type: function +defaults: + stage: baz +mixins: + my_mixin: + stage: foo diff --git a/tests/test_data/projects/mixins_different_entities/environment.yml b/tests/test_data/projects/mixins_different_entities/environment.yml new file mode 100644 index 0000000000..ac8feac3e8 --- /dev/null +++ b/tests/test_data/projects/mixins_different_entities/environment.yml @@ -0,0 +1,5 @@ +name: sf_env +channels: + - snowflake +dependencies: + - pandas diff --git a/tests/test_data/projects/mixins_different_entities/snowflake.yml 
b/tests/test_data/projects/mixins_different_entities/snowflake.yml new file mode 100644 index 0000000000..b45d697c9f --- /dev/null +++ b/tests/test_data/projects/mixins_different_entities/snowflake.yml @@ -0,0 +1,45 @@ +definition_version: '2' +entities: + function1: + artifacts: + - src + handler: app.hello + identifier: name + meta: + use_mixin: my_mixin + returns: string + signature: + - name: name + type: string + type: function + function2: + artifacts: + - src + handler: app.hello + identifier: name + returns: string + signature: + - name: name + type: string + type: function + streamlit1: + artifacts: + - streamlit_app.py + - environment.yml + - pages + identifier: + name: test_streamlit + pages_dir: non_existent_dir + query_warehouse: test_warehouse + stage: streamlit + title: My Fancy Streamlit + type: streamlit + meta: + use_mixin: my_mixin +defaults: + stage: baz +mixins: + my_mixin: + stage: foo + main_file: streamlit_app.py + pages_dir: pages diff --git a/tests/test_data/projects/mixins_different_entities/streamlit_app.py b/tests/test_data/projects/mixins_different_entities/streamlit_app.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests_integration/nativeapp/conftest.py b/tests_integration/nativeapp/conftest.py new file mode 100644 index 0000000000..6b909b4d18 --- /dev/null +++ b/tests_integration/nativeapp/conftest.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from contextlib import contextmanager +from pathlib import Path +from typing import Any + +import pytest + +from tests_integration.conftest import SnowCLIRunner + + +@pytest.fixture +def nativeapp_project_directory(project_directory, nativeapp_teardown): + """Wrapper around the project_directory fixture specific to Native App testing. 
+ + This fixture provides a context manager that does the following: + - Automatically calls `snow app teardown` before exiting + + Parameters for the returned context manager: + :param name: The name of the directory in tests_integration/test_data/projects to use. + """ + + @contextmanager + def _nativeapp_project_directory(name): + with project_directory(name) as d: + with nativeapp_teardown(project_dir=d): + yield d + + return _nativeapp_project_directory + + +@pytest.fixture +def nativeapp_teardown(runner: SnowCLIRunner): + """Runs `snow app teardown` before exiting. + + This fixture provides a context manager that runs + `snow app teardown --force --cascade` before exiting, + regardless of any exceptions raised. + + Parameters for the returned context manager: + :param project_dir: Path to the project directory (optional) + :param env: Environment variables to replace os.environ (optional) + """ + + @contextmanager + def _nativeapp_teardown( + *, + project_dir: Path | None = None, + env: dict | None = None, + ): + try: + yield + finally: + args = ["--force", "--cascade"] + if project_dir: + args += ["--project", str(project_dir)] + kwargs: dict[str, Any] = {} + if env: + kwargs["env"] = env + result = runner.invoke_with_connection(["app", "teardown", *args], **kwargs) + assert result.exit_code == 0 + + return _nativeapp_teardown diff --git a/tests_integration/nativeapp/test_bundle.py b/tests_integration/nativeapp/test_bundle.py index 23e9e776bb..fc4f665450 100644 --- a/tests_integration/nativeapp/test_bundle.py +++ b/tests_integration/nativeapp/test_bundle.py @@ -24,10 +24,10 @@ @pytest.fixture(scope="function", params=["napp_init_v1", "napp_init_v2"]) -def template_setup(runner, project_directory, request): +def template_setup(runner, nativeapp_project_directory, request): test_project = request.param with enable_definition_v2_feature_flag: - with project_directory(test_project) as project_root: + with nativeapp_project_directory(test_project) as 
project_root: # Vanilla bundle on the unmodified template result = runner.invoke_json(["app", "bundle"]) assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_debug_mode.py b/tests_integration/nativeapp/test_debug_mode.py index e782ee07ea..6de8e21033 100644 --- a/tests_integration/nativeapp/test_debug_mode.py +++ b/tests_integration/nativeapp/test_debug_mode.py @@ -90,6 +90,7 @@ def test_nativeapp_controlled_debug_mode( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -110,7 +111,7 @@ def test_nativeapp_controlled_debug_mode( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: + with nativeapp_teardown(): # debug mode should be true by default on first app deploy, # because snowflake.yml doesn't set it explicitly either way ("uncontrolled") assert is_debug_mode(snowflake_session, app_name) @@ -135,12 +136,3 @@ def test_nativeapp_controlled_debug_mode( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 assert is_debug_mode(snowflake_session, app_name) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_deploy.py b/tests_integration/nativeapp/test_deploy.py index 4244512718..e028cff91e 100644 --- a/tests_integration/nativeapp/test_deploy.py +++ b/tests_integration/nativeapp/test_deploy.py @@ -49,7 +49,7 @@ def _sanitize_deploy_output(output): @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy( test_project, - project_directory, + nativeapp_project_directory, runner, snowflake_session, 
default_username, @@ -59,52 +59,44 @@ def test_nativeapp_deploy( print_paths_as_posix, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection(["app", "deploy"]) assert result.exit_code == 0 assert sanitize_deploy_output(result.output) == snapshot - try: - # package exist - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - app_name = f"{project_name}_{default_username}{resource_suffix}".upper() - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show application packages like '{package_name}'", - ) - ), - dict(name=package_name), - ) - - # manifest file exists - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) - - # app does not exist - assert not_contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show applications like '{app_name}'", - ) - ), - dict(name=app_name), - ) - - # re-deploying should be a no-op; make sure we don't issue any PUT commands - result = runner.invoke_with_connection_json(["app", "deploy", "--debug"]) - assert result.exit_code == 0 - assert "Successfully uploaded chunk 0 of file" not in result.output - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # package exist + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + app_name = f"{project_name}_{default_username}{resource_suffix}".upper() + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show application packages like '{package_name}'", + 
) + ), + dict(name=package_name), + ) + + # manifest file exists + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) + + # app does not exist + assert not_contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show applications like '{app_name}'", + ) + ), + dict(name=app_name), + ) + + # re-deploying should be a no-op; make sure we don't issue any PUT commands + result = runner.invoke_with_connection_json(["app", "deploy", "--debug"]) + assert result.exit_code == 0 + assert "Successfully uploaded chunk 0 of file" not in result.output @pytest.mark.integration @@ -130,7 +122,7 @@ def test_nativeapp_deploy_prune( contains, not_contains, test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -139,40 +131,28 @@ def test_nativeapp_deploy_prune( sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection_json(["app", "deploy"]) assert result.exit_code == 0 - try: - # delete a file locally - os.remove(os.path.join("app", "README.md")) - - # deploy - result = runner.invoke_with_connection(command.split()) - assert result.exit_code == 0 - assert sanitize_deploy_output(result.output) == snapshot - - # verify the file does not exist on the stage - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - for name in contains: - assert contains_row_with(stage_files.json, {"name": name}) - for name in not_contains: - assert 
not_contains_row_with(stage_files.json, {"name": name}) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # delete a file locally + os.remove(os.path.join("app", "README.md")) + + # deploy + result = runner.invoke_with_connection(command.split()) + assert result.exit_code == 0 + assert sanitize_deploy_output(result.output) == snapshot + + # verify the file does not exist on the stage + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + for name in contains: + assert contains_row_with(stage_files.json, {"name": name}) + for name in not_contains: + assert not_contains_row_with(stage_files.json, {"name": name}) # Tests a simple flow of executing "snow app deploy [files]", verifying that only the specified files are synced to the stage @@ -181,7 +161,7 @@ def test_nativeapp_deploy_prune( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_files( test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -190,7 +170,7 @@ def test_nativeapp_deploy_files( sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): # sync only two specific files to stage result = runner.invoke_with_connection( [ @@ -204,29 +184,15 @@ def test_nativeapp_deploy_files( assert result.exit_code == 0 assert sanitize_deploy_output(result.output) == snapshot - try: - # manifest and script files exist, readme doesn't exist 
- package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) - assert contains_row_with( - stage_files.json, {"name": "stage/setup_script.sql"} - ) - assert not_contains_row_with(stage_files.json, {"name": "stage/README.md"}) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # manifest and script files exist, readme doesn't exist + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) + assert contains_row_with(stage_files.json, {"name": "stage/setup_script.sql"}) + assert not_contains_row_with(stage_files.json, {"name": "stage/README.md"}) # Tests that files inside of a symlinked directory are deployed @@ -235,7 +201,7 @@ def test_nativeapp_deploy_files( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_nested_directories( test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -244,7 +210,7 @@ def test_nativeapp_deploy_nested_directories( sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): # create nested file 
under app/ touch("app/nested/dir/file.txt") @@ -254,26 +220,14 @@ def test_nativeapp_deploy_nested_directories( assert result.exit_code == 0 assert sanitize_deploy_output(result.output) == snapshot - try: - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with( - stage_files.json, {"name": "stage/nested/dir/file.txt"} - ) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with( + stage_files.json, {"name": "stage/nested/dir/file.txt"} + ) # Tests that deploying a directory recursively syncs all of its contents @@ -282,14 +236,14 @@ def test_nativeapp_deploy_nested_directories( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_directory( test_project, - project_directory, + nativeapp_project_directory, runner, default_username, resource_suffix, sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): touch("app/dir/file.txt") result = runner.invoke_with_connection( ["app", "deploy", "app/dir", "--no-recursive", "--no-validate"] @@ -303,24 +257,12 @@ def test_nativeapp_deploy_directory( ) assert result.exit_code == 0 - 
try: - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/dir/file.txt"}) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with(stage_files.json, {"name": "stage/dir/file.txt"}) # Tests that deploying a directory without specifying -r returns an error @@ -329,21 +271,15 @@ def test_nativeapp_deploy_directory( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_directory_no_recursive( test_project, - project_directory, + nativeapp_project_directory, runner, ): - with project_directory(test_project): - try: - touch("app/nested/dir/file.txt") - result = runner.invoke_with_connection_json( - ["app", "deploy", "app/nested", "--no-validate"] - ) - assert result.exit_code == 1, result.output - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + touch("app/nested/dir/file.txt") + result = runner.invoke_with_connection_json( + ["app", "deploy", "app/nested", "--no-validate"] + ) + 
assert result.exit_code == 1, result.output # Tests that specifying an unknown path to deploy results in an error @@ -352,21 +288,15 @@ def test_nativeapp_deploy_directory_no_recursive( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_unknown_path( test_project, - project_directory, + nativeapp_project_directory, runner, ): - with project_directory(test_project): - try: - result = runner.invoke_with_connection_json( - ["app", "deploy", "does_not_exist", "--no-validate"] - ) - assert result.exit_code == 1 - assert "The following path does not exist:" in result.output - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + result = runner.invoke_with_connection_json( + ["app", "deploy", "does_not_exist", "--no-validate"] + ) + assert result.exit_code == 1 + assert "The following path does not exist:" in result.output # Tests that specifying a path with no deploy artifact results in an error @@ -375,21 +305,15 @@ def test_nativeapp_deploy_unknown_path( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_path_with_no_mapping( test_project, - project_directory, + nativeapp_project_directory, runner, ): - with project_directory(test_project): - try: - result = runner.invoke_with_connection_json( - ["app", "deploy", "snowflake.yml", "--no-validate"] - ) - assert result.exit_code == 1 - assert "No artifact found for" in result.output - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + result = runner.invoke_with_connection_json( + ["app", "deploy", "snowflake.yml", "--no-validate"] + ) + 
assert result.exit_code == 1 + assert "No artifact found for" in result.output # Tests that specifying a path and pruning result in an error @@ -398,25 +322,19 @@ def test_nativeapp_deploy_path_with_no_mapping( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_rejects_pruning_when_path_is_specified( test_project, - project_directory, + nativeapp_project_directory, runner, ): - with project_directory(test_project): - try: - os.unlink("app/README.md") - result = runner.invoke_with_connection_json( - ["app", "deploy", "app/README.md", "--prune"] - ) - - assert_that_result_is_usage_error( - result, - "Parameters 'paths' and '--prune' are incompatible and cannot be used simultaneously.", - ) + with nativeapp_project_directory(test_project): + os.unlink("app/README.md") + result = runner.invoke_with_connection_json( + ["app", "deploy", "app/README.md", "--prune"] + ) - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + assert_that_result_is_usage_error( + result, + "Parameters 'paths' and '--prune' are incompatible and cannot be used simultaneously.", + ) # Tests that specifying a path with no direct mapping falls back to search for prefix matches @@ -427,7 +345,7 @@ def test_nativeapp_deploy_rejects_pruning_when_path_is_specified( ) def test_nativeapp_deploy_looks_for_prefix_matches( test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -437,83 +355,72 @@ def test_nativeapp_deploy_looks_for_prefix_matches( ): project_name = "myapp" - with project_directory(test_project): - try: - result = runner.invoke_with_connection(["app", "deploy", "-r", "app"]) - assert result.exit_code == 0 - assert sanitize_deploy_output(result.output) == snapshot - - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() 
- ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) - assert contains_row_with( - stage_files.json, {"name": "stage/setup_script.sql"} - ) - assert contains_row_with(stage_files.json, {"name": "stage/README.md"}) - assert not_contains_row_with( - stage_files.json, {"name": "stage/src/main.py"} - ) - assert not_contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} - ) - - result = runner.invoke_with_connection( - ["app", "deploy", "-r", "lib/parent/child/c"] - ) - assert result.exit_code == 0 - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} - ) - assert not_contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/a.py"} - ) - assert not_contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/b.py"} - ) - - result = runner.invoke_with_connection( - ["app", "deploy", "lib/parent/child/a.py"] - ) - assert result.exit_code == 0 - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/a.py"} - ) - assert not_contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/b.py"} - ) - - result = runner.invoke_with_connection(["app", "deploy", "lib", "-r"]) - assert result.exit_code == 0 - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} - ) - assert 
contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/a.py"} - ) - assert contains_row_with( - stage_files.json, {"name": "stage/parent-lib/child/b.py"} - ) - - finally: - result = runner.invoke_with_connection(["app", "teardown", "--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + result = runner.invoke_with_connection(["app", "deploy", "-r", "app"]) + assert result.exit_code == 0 + assert sanitize_deploy_output(result.output) == snapshot + + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) + assert contains_row_with(stage_files.json, {"name": "stage/setup_script.sql"}) + assert contains_row_with(stage_files.json, {"name": "stage/README.md"}) + assert not_contains_row_with(stage_files.json, {"name": "stage/src/main.py"}) + assert not_contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} + ) + + result = runner.invoke_with_connection( + ["app", "deploy", "-r", "lib/parent/child/c"] + ) + assert result.exit_code == 0 + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} + ) + assert not_contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/a.py"} + ) + assert not_contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/b.py"} + ) + + result = runner.invoke_with_connection( + ["app", "deploy", "lib/parent/child/a.py"] + ) + assert result.exit_code == 0 + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with( + 
stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/a.py"} + ) + assert not_contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/b.py"} + ) + + result = runner.invoke_with_connection(["app", "deploy", "lib", "-r"]) + assert result.exit_code == 0 + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/c/c.py"} + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/a.py"} + ) + assert contains_row_with( + stage_files.json, {"name": "stage/parent-lib/child/b.py"} + ) # Tests that snow app deploy -r . deploys all changes @@ -522,7 +429,7 @@ def test_nativeapp_deploy_looks_for_prefix_matches( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_deploy_dot( test_project, - project_directory, + nativeapp_project_directory, runner, snapshot, print_paths_as_posix, @@ -531,25 +438,16 @@ def test_nativeapp_deploy_dot( sanitize_deploy_output, ): project_name = "myapp" - with project_directory(test_project): - try: - result = runner.invoke_with_connection(["app", "deploy", "-r", "."]) - assert result.exit_code == 0 - assert sanitize_deploy_output(result.output) == snapshot - - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - stage_name = "app_src.stage" # as defined in native-apps-templates/basic - stage_files = runner.invoke_with_connection_json( - ["stage", "list-files", f"{package_name}.{stage_name}"] - ) - assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) - assert contains_row_with( - stage_files.json, {"name": "stage/setup_script.sql"} - ) - assert contains_row_with(stage_files.json, {"name": "stage/README.md"}) - - finally: - result = runner.invoke_with_connection_json(["app", "teardown", 
"--force"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + result = runner.invoke_with_connection(["app", "deploy", "-r", "."]) + assert result.exit_code == 0 + assert sanitize_deploy_output(result.output) == snapshot + + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + stage_name = "app_src.stage" # as defined in native-apps-templates/basic + stage_files = runner.invoke_with_connection_json( + ["stage", "list-files", f"{package_name}.{stage_name}"] + ) + assert contains_row_with(stage_files.json, {"name": "stage/manifest.yml"}) + assert contains_row_with(stage_files.json, {"name": "stage/setup_script.sql"}) + assert contains_row_with(stage_files.json, {"name": "stage/README.md"}) diff --git a/tests_integration/nativeapp/test_events.py b/tests_integration/nativeapp/test_events.py index f921db43e4..849ddfde5e 100644 --- a/tests_integration/nativeapp/test_events.py +++ b/tests_integration/nativeapp/test_events.py @@ -39,9 +39,9 @@ ], ) def test_app_events_mutually_exclusive_options( - test_project, runner, project_directory, flag_names, command + test_project, runner, nativeapp_project_directory, flag_names, command ): - with project_directory(test_project): + with nativeapp_project_directory(test_project): # The integration test account doesn't have an event table set up # but this test is still useful to validate the negative case result = runner.invoke_with_connection(["app", "events", *command]) @@ -69,9 +69,9 @@ def test_app_events_mutually_exclusive_options( ], ) def test_app_events_paired_options( - test_project, runner, project_directory, flag_names, command + test_project, runner, nativeapp_project_directory, flag_names, command ): - with project_directory(test_project): + with nativeapp_project_directory(test_project): # The integration test account doesn't have an event table set up # but this test is still useful to validate the negative case result = 
runner.invoke_with_connection(["app", "events", *command]) @@ -84,8 +84,10 @@ def test_app_events_paired_options( @pytest.mark.integration @enable_definition_v2_feature_flag @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) -def test_app_events_reject_invalid_type(test_project, runner, project_directory): - with project_directory(test_project): +def test_app_events_reject_invalid_type( + test_project, runner, nativeapp_project_directory +): + with nativeapp_project_directory(test_project): # The integration test account doesn't have an event table set up # but this test is still useful to validate the negative case result = runner.invoke_with_connection(["app", "events", "--type", "foo"]) diff --git a/tests_integration/nativeapp/test_init_run.py b/tests_integration/nativeapp/test_init_run.py index 4812585df5..d9925b88ff 100644 --- a/tests_integration/nativeapp/test_init_run.py +++ b/tests_integration/nativeapp/test_init_run.py @@ -32,48 +32,36 @@ @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_init_run_without_modifications( test_project, - project_directory, + nativeapp_project_directory, runner, snowflake_session, default_username, resource_suffix, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - # app + package exist - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - app_name = f"{project_name}_{default_username}{resource_suffix}".upper() - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show application packages like '{package_name}'", - ) - ), - dict(name=package_name), - ) - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show applications like '{app_name}'", - ) - ), - dict(name=app_name), - 
) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # app + package exist + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + app_name = f"{project_name}_{default_username}{resource_suffix}".upper() + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show application packages like '{package_name}'", + ) + ), + dict(name=package_name), + ) + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show applications like '{app_name}'", + ) + ), + dict(name=app_name), + ) # Tests a simple flow of an existing project, but executing snow app run and teardown, all with distribution=internal @@ -87,6 +75,7 @@ def test_nativeapp_run_existing( snowflake_session, project_definition_files: List[Path], default_username, + nativeapp_teardown, resource_suffix, ): project_name = "integration" @@ -95,7 +84,7 @@ def test_nativeapp_run_existing( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: + with nativeapp_teardown(): # app + package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -137,15 +126,6 @@ def test_nativeapp_run_existing( {"ECHO": test_string}, ) - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Tests a simple flow of initiating a project, executing snow app run and teardown, all with 
distribution=internal @pytest.mark.integration @@ -153,48 +133,36 @@ def test_nativeapp_run_existing( @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_init_run_handles_spaces( test_project, - project_directory, + nativeapp_project_directory, runner, snowflake_session, default_username, resource_suffix, ): project_name = "myapp" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - # app + package exist - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - app_name = f"{project_name}_{default_username}{resource_suffix}".upper() - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show application packages like '{package_name}'", - ) - ), - dict(name=package_name), - ) - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show applications like '{app_name}'", - ) - ), - dict(name=app_name), - ) - - # make sure we always delete the app - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # app + package exist + package_name = f"{project_name}_pkg_{default_username}{resource_suffix}".upper() + app_name = f"{project_name}_{default_username}{resource_suffix}".upper() + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show application packages like '{package_name}'", + ) + ), + dict(name=package_name), + ) + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show applications like '{app_name}'", + ) + ), + dict(name=app_name), + ) # Tests a 
simple flow of an existing project, but executing snow app run and teardown, all with distribution=external @@ -210,6 +178,7 @@ def test_nativeapp_run_existing_w_external( snowflake_session, project_definition_files: List[Path], default_username, + nativeapp_teardown, resource_suffix, ): project_name = "integration_external" @@ -218,7 +187,7 @@ def test_nativeapp_run_existing_w_external( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: + with nativeapp_teardown(): # app + package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -273,64 +242,35 @@ def test_nativeapp_run_existing_w_external( {"ECHO": test_string}, ) - # make sure we always delete the app, --force required for external distribution - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - - expect = snowflake_session.execute_string( - f"show applications like '{app_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": app_name} - ) - - expect = snowflake_session.execute_string( - f"show application packages like '{package_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": package_name} - ) - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Verifies that running "app run" after "app deploy" upgrades the app @pytest.mark.integration @enable_definition_v2_feature_flag @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) def test_nativeapp_run_after_deploy( - test_project, project_directory, runner, default_username, resource_suffix + test_project, nativeapp_project_directory, runner, default_username, resource_suffix ): project_name = "myapp" app_name = f"{project_name}_{default_username}{resource_suffix}" stage_fqn = 
f"{project_name}_pkg_{default_username}{resource_suffix}.app_src.stage" - with project_directory(test_project): - try: - # Run #1 - result = runner.invoke_with_connection_json(["app", "run"]) - assert result.exit_code == 0 - - # Make a change & deploy - with open("app/README.md", "a") as file: - file.write("### Test") - result = runner.invoke_with_connection_json(["app", "deploy"]) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + # Run #1 + result = runner.invoke_with_connection_json(["app", "run"]) + assert result.exit_code == 0 - # Run #2 - result = runner.invoke_with_connection_json(["app", "run", "--debug"]) - assert result.exit_code == 0 - assert ( - f"alter application {app_name} upgrade using @{stage_fqn}" - in result.output - ) + # Make a change & deploy + with open("app/README.md", "a") as file: + file.write("### Test") + result = runner.invoke_with_connection_json(["app", "deploy"]) + assert result.exit_code == 0 - finally: - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # Run #2 + result = runner.invoke_with_connection_json(["app", "run", "--debug"]) + assert result.exit_code == 0 + assert ( + f"alter application {app_name} upgrade using @{stage_fqn}" in result.output + ) # Tests initialization of a project from a repo with a single template @@ -390,6 +330,7 @@ def test_nativeapp_run_orphan( force_flag, default_username, resource_suffix, + nativeapp_teardown, ): project_name = "integration" project_dir = project_definition_files[0].parent @@ -397,7 +338,7 @@ def test_nativeapp_run_orphan( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: + with nativeapp_teardown(): # app + package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -479,21 +420,6 @@ def test_nativeapp_run_orphan( dict(name=app_name, source=package_name), ) - # make sure we always delete the app - result = 
runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # manually drop the application in case the test failed and it wasn't dropped - result = runner.invoke_with_connection( - ["sql", "-q", f"drop application if exists {app_name} cascade"] - ) - assert result.exit_code == 0, result.output - - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Verifies that we can always cross-upgrade between different # run configurations as long as we pass the --force flag to "app run" @@ -517,7 +443,7 @@ def test_nativeapp_run_orphan( ) def test_nativeapp_force_cross_upgrade( test_project, - project_directory, + nativeapp_project_directory, run_args_from, run_args_to, runner, @@ -528,36 +454,30 @@ def test_nativeapp_force_cross_upgrade( app_name = f"{project_name}_{default_username}{resource_suffix}" pkg_name = f"{project_name}_pkg_{default_username}{resource_suffix}" - with project_directory(test_project): - try: - # Create version - result = runner.invoke_with_connection(["app", "version", "create", "v1"]) - assert result.exit_code == 0 - - # Set default release directive - result = runner.invoke_with_connection( - [ - "sql", - "-q", - f"alter application package {pkg_name} set default release directive version = v1 patch = 0", - ] - ) - assert result.exit_code == 0 + with nativeapp_project_directory(test_project): + # Create version + result = runner.invoke_with_connection(["app", "version", "create", "v1"]) + assert result.exit_code == 0 - # Initial run - result = runner.invoke_with_connection(["app", "run"] + run_args_from) - assert result.exit_code == 0 + # Set default release directive + result = runner.invoke_with_connection( + [ + "sql", + "-q", + f"alter application package {pkg_name} set default release directive version = v1 patch = 0", + ] + ) + assert result.exit_code == 0 - # 
(Cross-)upgrade - is_cross_upgrade = run_args_from != run_args_to - result = runner.invoke_with_connection( - ["app", "run"] + run_args_to + ["--force"] - ) - assert result.exit_code == 0 - if is_cross_upgrade: - assert f"Dropping application object {app_name}." in result.output + # Initial run + result = runner.invoke_with_connection(["app", "run"] + run_args_from) + assert result.exit_code == 0 - finally: - # Drop the package - result = runner.invoke_with_connection(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # (Cross-)upgrade + is_cross_upgrade = run_args_from != run_args_to + result = runner.invoke_with_connection( + ["app", "run"] + run_args_to + ["--force"] + ) + assert result.exit_code == 0 + if is_cross_upgrade: + assert f"Dropping application object {app_name}." in result.output diff --git a/tests_integration/nativeapp/test_large_upload.py b/tests_integration/nativeapp/test_large_upload.py index 53fad983f9..126c51e857 100644 --- a/tests_integration/nativeapp/test_large_upload.py +++ b/tests_integration/nativeapp/test_large_upload.py @@ -97,6 +97,5 @@ def test_large_upload_skips_reupload( # make sure our file has been deleted temp_file.unlink(missing_ok=True) - # teardown is idempotent, so we can execute it again with no ill effects result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_open.py b/tests_integration/nativeapp/test_open.py index c4e67fd683..91465847f0 100644 --- a/tests_integration/nativeapp/test_open.py +++ b/tests_integration/nativeapp/test_open.py @@ -26,30 +26,24 @@ def test_nativeapp_open( mock_typer_launch, runner, test_project, - project_directory, + nativeapp_project_directory, default_username, resource_suffix, ): project_name = "myapp" app_name = f"{project_name}_{default_username}{resource_suffix}" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = 
runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - result = runner.invoke_with_connection_json(["app", "open"]) - assert result.exit_code == 0 - assert "Snowflake Native App opened in browser." in result.output - mock_call = mock_typer_launch.call_args_list[0].args[0] - assert re.match( - rf"https://app.snowflake.com/.*#/apps/application/{app_name}", - mock_call, - re.IGNORECASE, - ) + result = runner.invoke_with_connection_json(["app", "open"]) + assert result.exit_code == 0 + assert "Snowflake Native App opened in browser." in result.output - finally: - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force", "--cascade"] - ) - assert result.exit_code == 0 + mock_call = mock_typer_launch.call_args_list[0].args[0] + assert re.match( + rf"https://app.snowflake.com/.*#/apps/application/{app_name}", + mock_call, + re.IGNORECASE, + ) diff --git a/tests_integration/nativeapp/test_post_deploy.py b/tests_integration/nativeapp/test_post_deploy.py index b7aed5486a..fe3f9d4e57 100644 --- a/tests_integration/nativeapp/test_post_deploy.py +++ b/tests_integration/nativeapp/test_post_deploy.py @@ -75,7 +75,7 @@ def test_nativeapp_post_deploy( snowflake_session, default_username, resource_suffix, - project_directory, + nativeapp_project_directory, test_project, is_versioned, with_project_flag, @@ -85,7 +85,7 @@ def test_nativeapp_post_deploy( app_name = f"{project_name}_{default_username}{resource_suffix}" pkg_name = f"{project_name}_pkg_{default_username}{resource_suffix}" - with project_directory(test_project) as tmp_dir: + with nativeapp_project_directory(test_project) as tmp_dir: project_args = ["--project", f"{tmp_dir}"] if with_project_flag else [] version_run_args = ["--version", version] if is_versioned else [] diff --git a/tests_integration/nativeapp/test_project_templating.py b/tests_integration/nativeapp/test_project_templating.py index c0cf13d2ea..89c21fbdb0 100644 --- 
a/tests_integration/nativeapp/test_project_templating.py +++ b/tests_integration/nativeapp/test_project_templating.py @@ -34,6 +34,7 @@ def test_nativeapp_project_templating_use_env_from_os( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -49,7 +50,7 @@ def test_nativeapp_project_templating_use_env_from_os( ) assert result.exit_code == 0 - try: + with nativeapp_teardown(env=local_test_env): # app + package exist package_name = f"{project_name}_{test_ci_env}_pkg_{default_username}{resource_suffix}".upper() app_name = f"{project_name}_{test_ci_env}_{default_username}{resource_suffix}".upper() @@ -89,21 +90,6 @@ def test_nativeapp_project_templating_use_env_from_os( {"ECHO": test_string}, ) - # make sure we always delete the app - result = runner.invoke_with_connection_json( - ["app", "teardown"], - env=local_test_env, - ) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"], - env=local_test_env, - ) - assert result.exit_code == 0 - # Tests a simple flow of native app with template reading env variables from OS through an intermediate var @pytest.mark.integration @@ -118,6 +104,7 @@ def test_nativeapp_project_templating_use_env_from_os_through_intermediate_var( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -133,7 +120,7 @@ def test_nativeapp_project_templating_use_env_from_os_through_intermediate_var( ) assert result.exit_code == 0 - try: + with nativeapp_teardown(env=local_test_env): # app + package exist package_name = f"{project_name}_{test_ci_env}_pkg_{default_username}{resource_suffix}".upper() app_name = f"{project_name}_{test_ci_env}_{default_username}{resource_suffix}".upper() @@ -173,21 +160,6 @@ def 
test_nativeapp_project_templating_use_env_from_os_through_intermediate_var( {"ECHO": test_string}, ) - # make sure we always delete the app - result = runner.invoke_with_connection_json( - ["app", "teardown"], - env=local_test_env, - ) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"], - env=local_test_env, - ) - assert result.exit_code == 0 - # Tests a simple flow of native app with template reading default env values from project definition file @pytest.mark.integration @@ -202,6 +174,7 @@ def test_nativeapp_project_templating_use_default_env_from_project( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -217,7 +190,7 @@ def test_nativeapp_project_templating_use_default_env_from_project( ) assert result.exit_code == 0 - try: + with nativeapp_teardown(env=local_test_env): # app + package exist package_name = f"{project_name}_{default_ci_env}_pkg_{default_username}{resource_suffix}".upper() app_name = f"{project_name}_{default_ci_env}_{default_username}{resource_suffix}".upper() @@ -257,21 +230,6 @@ def test_nativeapp_project_templating_use_default_env_from_project( {"ECHO": test_string}, ) - # make sure we always delete the app - result = runner.invoke_with_connection_json( - ["app", "teardown"], - env=local_test_env, - ) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"], - env=local_test_env, - ) - assert result.exit_code == 0 - # Tests a native app with --env parameter through command line overwriting values from os env and project definition filetemplate reading env var @pytest.mark.integration @@ -286,6 +244,7 @@ def 
test_nativeapp_project_templating_use_env_from_cli_as_highest_priority( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -303,7 +262,7 @@ def test_nativeapp_project_templating_use_env_from_cli_as_highest_priority( ) assert result.exit_code == 0 - try: + with nativeapp_teardown(env=local_test_env): # app + package exist package_name = f"{project_name}_{expected_value}_pkg_{default_username}{resource_suffix}".upper() app_name = f"{project_name}_{expected_value}_{default_username}{resource_suffix}".upper() @@ -350,14 +309,6 @@ def test_nativeapp_project_templating_use_env_from_cli_as_highest_priority( ) assert result.exit_code == 0 - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--env", f"CI_ENV={expected_value}", "--force"], - env=local_test_env, - ) - assert result.exit_code == 0 - # Tests that other native app commands still succeed with templating @pytest.mark.integration @@ -369,6 +320,7 @@ def test_nativeapp_project_templating_use_env_from_cli_as_highest_priority( ) def test_nativeapp_project_templating_bundle_deploy_successful( runner, + nativeapp_teardown, project_definition_files: List[Path], ): project_dir = project_definition_files[0].parent @@ -377,7 +329,7 @@ def test_nativeapp_project_templating_bundle_deploy_successful( local_test_env = {"CI_ENV": test_ci_env, "APP_DIR": "app"} with pushd(project_dir): - try: + with nativeapp_teardown(env=local_test_env): result = runner.invoke_json( ["app", "bundle"], env=local_test_env, @@ -389,10 +341,3 @@ def test_nativeapp_project_templating_bundle_deploy_successful( env=local_test_env, ) assert result.exit_code == 0 - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"], - env=local_test_env, - ) - 
assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_teardown.py b/tests_integration/nativeapp/test_teardown.py index d3e945008e..8da5e650ae 100644 --- a/tests_integration/nativeapp/test_teardown.py +++ b/tests_integration/nativeapp/test_teardown.py @@ -45,7 +45,7 @@ def test_nativeapp_teardown_cascade( expected_error, orphan_app, test_project, - project_directory, + nativeapp_project_directory, runner, snowflake_session, default_username, @@ -55,7 +55,7 @@ def test_nativeapp_teardown_cascade( app_name = f"{project_name}_{default_username}{resource_suffix}".upper() db_name = f"{project_name}_db_{default_username}{resource_suffix}".upper() - with project_directory(test_project): + with nativeapp_project_directory(test_project): # Replacing the static DB name with a unique one to avoid collisions between tests with open("app/setup_script.sql", "r") as file: setup_script_content = file.read() @@ -67,78 +67,68 @@ def test_nativeapp_teardown_cascade( result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - # Grant permission to create databases - snowflake_session.execute_string( - f"grant create database on account to application {app_name}", - ) - - # Create the database - snowflake_session.execute_string("use warehouse xsmall") - snowflake_session.execute_string( - f"call {app_name}.core.create_db()", - ) - - # Verify the database is owned by the app - assert contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string(f"show databases like '{db_name}'") - ), - dict(name=db_name, owner=app_name), - ) - - if orphan_app: - # orphan the app by dropping the application package, - # this causes future `show objects owned by application` queries to fail - # and `snow app teardown` needs to be resilient against this - package_name = ( - f"{project_name}_pkg_{default_username}{resource_suffix}".upper() - ) - snowflake_session.execute_string( - f"drop application package {package_name}" - ) 
- assert not_contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string( - f"show application packages like '{package_name}'", - ) - ), - dict(name=package_name), - ) + # Grant permission to create databases + snowflake_session.execute_string( + f"grant create database on account to application {app_name}", + ) - # Run the teardown command - result = runner.invoke_with_connection_json(command.split()) - if expected_error is not None: - assert result.exit_code == 1 - assert expected_error in result.output - return + # Create the database + snowflake_session.execute_string("use warehouse xsmall") + snowflake_session.execute_string( + f"call {app_name}.core.create_db()", + ) - assert result.exit_code == 0 + # Verify the database is owned by the app + assert contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string(f"show databases like '{db_name}'") + ), + dict(name=db_name, owner=app_name), + ) - # Verify the database is dropped - assert not_contains_row_with( - row_from_snowflake_session( - snowflake_session.execute_string(f"show databases like '{db_name}'") - ), - dict(name=db_name, owner=app_name), + if orphan_app: + # orphan the app by dropping the application package, + # this causes future `show objects owned by application` queries to fail + # and `snow app teardown` needs to be resilient against this + package_name = ( + f"{project_name}_pkg_{default_username}{resource_suffix}".upper() ) - - # Verify the app is dropped + snowflake_session.execute_string(f"drop application package {package_name}") assert not_contains_row_with( row_from_snowflake_session( snowflake_session.execute_string( - f"show applications like '{app_name}'", + f"show application packages like '{package_name}'", ) ), - dict(name=app_name), + dict(name=package_name), ) - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force", 
"--cascade"] - ) - assert result.exit_code == 0 + # Run the teardown command + result = runner.invoke_with_connection_json(command.split()) + if expected_error is not None: + assert result.exit_code == 1 + assert expected_error in result.output + return + + assert result.exit_code == 0 + + # Verify the database is dropped + assert not_contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string(f"show databases like '{db_name}'") + ), + dict(name=db_name, owner=app_name), + ) + + # Verify the app is dropped + assert not_contains_row_with( + row_from_snowflake_session( + snowflake_session.execute_string( + f"show applications like '{app_name}'", + ) + ), + dict(name=app_name), + ) @pytest.mark.integration @@ -151,32 +141,25 @@ def test_nativeapp_teardown_unowned_app( resource_suffix, force, test_project, - project_directory, + nativeapp_project_directory, ): project_name = "myapp" app_name = f"{project_name}_{default_username}{resource_suffix}" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection_json(["app", "run"]) assert result.exit_code == 0 - try: - result = runner.invoke_with_connection_json( - ["sql", "-q", f"alter application {app_name} set comment = 'foo'"] - ) - assert result.exit_code == 0 - - if force: - result = runner.invoke_with_connection_json( - ["app", "teardown", "--force"] - ) - assert result.exit_code == 0 - else: - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 1 + result = runner.invoke_with_connection_json( + ["sql", "-q", f"alter application {app_name} set comment = 'foo'"] + ) + assert result.exit_code == 0 - finally: + if force: result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) assert result.exit_code == 0 + else: + result = runner.invoke_with_connection_json(["app", "teardown"]) + assert result.exit_code == 1 @pytest.mark.integration @@ -189,47 +172,42 @@ def 
test_nativeapp_teardown_pkg_versions( resource_suffix, default_release_directive, test_project, - project_directory, + nativeapp_project_directory, ): project_name = "myapp" pkg_name = f"{project_name}_pkg_{default_username}{resource_suffix}" - with project_directory(test_project): + with nativeapp_project_directory(test_project): result = runner.invoke_with_connection(["app", "version", "create", "v1"]) assert result.exit_code == 0 - try: - # when setting a release directive, we will not have the ability to drop the version later - if default_release_directive: - result = runner.invoke_with_connection( - [ - "sql", - "-q", - f"alter application package {pkg_name} set default release directive version = v1 patch = 0", - ] - ) - assert result.exit_code == 0 - - # try to teardown; fail because we have a version - result = runner.invoke_with_connection(["app", "teardown"]) - assert result.exit_code == 1 - assert f"Drop versions first, or use --force to override." in result.output + # when setting a release directive, we will not have the ability to drop the version later + if default_release_directive: + result = runner.invoke_with_connection( + [ + "sql", + "-q", + f"alter application package {pkg_name} set default release directive version = v1 patch = 0", + ] + ) + assert result.exit_code == 0 - teardown_args = [] - if not default_release_directive: - # if we didn't set a release directive, we can drop the version and try again - result = runner.invoke_with_connection( - ["app", "version", "drop", "v1", "--force"] - ) - assert result.exit_code == 0 - else: - # if we did set a release directive, we need --force for teardown to work - teardown_args = ["--force"] + # try to teardown; fail because we have a version + result = runner.invoke_with_connection(["app", "teardown"]) + assert result.exit_code == 1 + assert f"Drop versions first, or use --force to override." 
in result.output - # either way, we can now tear down the application package - result = runner.invoke_with_connection(["app", "teardown"] + teardown_args) + teardown_args = [] + if not default_release_directive: + # if we didn't set a release directive, we can drop the version and try again + result = runner.invoke_with_connection( + ["app", "version", "drop", "v1", "--force"] + ) assert result.exit_code == 0 + else: + # if we did set a release directive, we need --force for teardown to work + teardown_args = ["--force"] - finally: - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 + # either way, we can now tear down the application package + result = runner.invoke_with_connection(["app", "teardown"] + teardown_args) + assert result.exit_code == 0 diff --git a/tests_integration/nativeapp/test_validate.py b/tests_integration/nativeapp/test_validate.py index 21f0ca8dfe..50084469c6 100644 --- a/tests_integration/nativeapp/test_validate.py +++ b/tests_integration/nativeapp/test_validate.py @@ -21,35 +21,25 @@ @pytest.mark.integration @enable_definition_v2_feature_flag @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) -def test_nativeapp_validate(test_project, project_directory, runner): - with project_directory(test_project): - try: - # validate the app's setup script - result = runner.invoke_with_connection(["app", "validate"]) - assert result.exit_code == 0, result.output - assert "Native App validation succeeded." in result.output - finally: - result = runner.invoke_with_connection(["app", "teardown", "--force"]) - assert result.exit_code == 0, result.output +def test_nativeapp_validate(test_project, nativeapp_project_directory, runner): + with nativeapp_project_directory(test_project): + # validate the app's setup script + result = runner.invoke_with_connection(["app", "validate"]) + assert result.exit_code == 0, result.output + assert "Native App validation succeeded." 
in result.output @pytest.mark.integration @enable_definition_v2_feature_flag @pytest.mark.parametrize("test_project", ["napp_init_v1", "napp_init_v2"]) -def test_nativeapp_validate_failing(test_project, project_directory, runner): - with project_directory(test_project): +def test_nativeapp_validate_failing(test_project, nativeapp_project_directory, runner): + with nativeapp_project_directory(test_project): # Create invalid SQL file Path("app/setup_script.sql").write_text("Lorem ipsum dolor sit amet") - try: - # validate the app's setup script, this will fail - # because we include an empty file - result = runner.invoke_with_connection(["app", "validate"]) - assert result.exit_code == 1, result.output - assert ( - "Snowflake Native App setup script failed validation." in result.output - ) - assert "syntax error" in result.output - finally: - result = runner.invoke_with_connection(["app", "teardown", "--force"]) - assert result.exit_code == 0, result.output + # validate the app's setup script, this will fail + # because we include an empty file + result = runner.invoke_with_connection(["app", "validate"]) + assert result.exit_code == 1, result.output + assert "Snowflake Native App setup script failed validation." 
in result.output + assert "syntax error" in result.output diff --git a/tests_integration/nativeapp/test_version.py b/tests_integration/nativeapp/test_version.py index bb35ffb84b..18859a705e 100644 --- a/tests_integration/nativeapp/test_version.py +++ b/tests_integration/nativeapp/test_version.py @@ -35,6 +35,7 @@ def test_nativeapp_version_create_and_drop( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -45,7 +46,7 @@ def test_nativeapp_version_create_and_drop( ) assert result_create.exit_code == 0 - try: + with nativeapp_teardown(): # package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -73,22 +74,6 @@ def test_nativeapp_version_create_and_drop( actual = runner.invoke_with_connection_json(["app", "version", "list"]) assert len(actual.json) == 0 - # make sure we always delete the package - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - expect = snowflake_session.execute_string( - f"show application packages like '{package_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": package_name} - ) - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Tests upgrading an app from an existing loose files installation to versioned installation. 
@pytest.mark.integration @@ -101,6 +86,7 @@ def test_nativeapp_upgrade( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" @@ -111,7 +97,7 @@ def test_nativeapp_upgrade( ["app", "version", "create", "v1", "--force", "--skip-git-check"] ) - try: + with nativeapp_teardown(): # package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -139,15 +125,6 @@ def test_nativeapp_upgrade( ["app", "version", "drop", "v1", "--force"] ) - # make sure we always delete the package - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Make sure we can create 3+ patches on the same version @pytest.mark.integration @@ -158,12 +135,13 @@ def test_nativeapp_version_create_3_patches( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" project_dir = project_definition_files[0].parent with pushd(project_dir): - try: + with nativeapp_teardown(): package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() ) @@ -195,22 +173,6 @@ def test_nativeapp_version_create_3_patches( actual = runner.invoke_with_connection_json(["app", "version", "list"]) assert len(actual.json) == 0 - # make sure we always delete the package - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - expect = snowflake_session.execute_string( - f"show application packages like '{package_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": package_name} - ) - - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result 
= runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - @pytest.mark.integration @enable_definition_v2_feature_flag @@ -222,12 +184,13 @@ def test_nativeapp_version_create_patch_is_integer( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, project_definition_files: List[Path], ): project_name = "integration" project_dir = project_definition_files[0].parent with pushd(project_dir): - try: + with nativeapp_teardown(): package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() ) @@ -282,21 +245,6 @@ def test_nativeapp_version_create_patch_is_integer( actual = runner.invoke_with_connection_json(["app", "version", "list"]) assert len(actual.json) == 0 - # make sure we always delete the package - result = runner.invoke_with_connection_json(["app", "teardown"]) - assert result.exit_code == 0 - - expect = snowflake_session.execute_string( - f"show application packages like '{package_name}'" - ) - assert not_contains_row_with( - row_from_snowflake_session(expect), {"name": package_name} - ) - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0 - # Tests creating a version for a package that was not created by the CLI # (doesn't have the magic CLI comment) @@ -310,6 +258,7 @@ def test_nativeapp_version_create_package_no_magic_comment( snowflake_session, default_username, resource_suffix, + nativeapp_teardown, snapshot, project_definition_files: List[Path], ): @@ -319,7 +268,7 @@ def test_nativeapp_version_create_package_no_magic_comment( result_create_abort = runner.invoke_with_connection_json(["app", "deploy"]) assert result_create_abort.exit_code == 0 - try: + with nativeapp_teardown(): # package exist package_name = ( f"{project_name}_pkg_{default_username}{resource_suffix}".upper() @@ -384,7 +333,3 @@ def 
test_nativeapp_version_create_package_no_magic_comment( # Remove date field row.pop("created_on", None) assert actual.json == snapshot - finally: - # teardown is idempotent, so we can execute it again with no ill effects - result = runner.invoke_with_connection_json(["app", "teardown", "--force"]) - assert result.exit_code == 0