From f6894593c127ad1d1765b8c838951511cbd4d853 Mon Sep 17 00:00:00 2001 From: Adam Stus Date: Fri, 17 Jan 2025 12:44:12 +0100 Subject: [PATCH] Added snow spcs compute-pool deploy command --- RELEASE-NOTES.md | 2 + .../_plugins/spcs/compute_pool/commands.py | 59 ++++- .../spcs/compute_pool/compute_pool_entity.py | 8 + .../compute_pool/compute_pool_entity_model.py | 23 ++ .../cli/_plugins/spcs/compute_pool/manager.py | 35 ++- .../api/project/schemas/entities/entities.py | 8 + tests/__snapshots__/test_help_messages.ambr | 99 ++++++++ tests/spcs/test_compute_pool.py | 228 +++++++++++++++--- .../projects/spcs_compute_pool/snowflake.yml | 13 + .../spcs_multiple_compute_pools/snowflake.yml | 33 +++ tests_integration/spcs/test_compute_pool.py | 36 +++ .../spcs/testing_utils/compute_pool_utils.py | 30 +++ .../projects/spcs_compute_pool/snowflake.yml | 13 + 13 files changed, 555 insertions(+), 32 deletions(-) create mode 100644 src/snowflake/cli/_plugins/spcs/compute_pool/compute_pool_entity.py create mode 100644 src/snowflake/cli/_plugins/spcs/compute_pool/compute_pool_entity_model.py create mode 100644 tests/test_data/projects/spcs_compute_pool/snowflake.yml create mode 100644 tests/test_data/projects/spcs_multiple_compute_pools/snowflake.yml create mode 100644 tests_integration/test_data/projects/spcs_compute_pool/snowflake.yml diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 652a168144..9657a2f181 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -47,6 +47,8 @@ * Use `--all` to fetch all columns. * Added support for glob pattern (except `**`) in artifact paths in snowflake.yml for Streamlit. * Added support for glob pattern (except `**`) in artifact paths in snowflake.yml for Snowpark, requires ENABLE_SNOWPARK_GLOB_SUPPORT feature flag. +* Added `--replace` flag to `snow spcs compute-pool create` command. +* Added command `snow spcs compute-pool deploy`. 
## Fixes and improvements diff --git a/src/snowflake/cli/_plugins/spcs/compute_pool/commands.py b/src/snowflake/cli/_plugins/spcs/compute_pool/commands.py index a236e06f05..6916b7b633 100644 --- a/src/snowflake/cli/_plugins/spcs/compute_pool/commands.py +++ b/src/snowflake/cli/_plugins/spcs/compute_pool/commands.py @@ -14,10 +14,10 @@ from __future__ import annotations -from typing import Optional +from typing import Dict, Optional import typer -from click import ClickException +from click import ClickException, UsageError from snowflake.cli._plugins.object.command_aliases import ( add_object_command_aliases, ) @@ -25,17 +25,29 @@ from snowflake.cli._plugins.spcs.common import ( validate_and_set_instances, ) +from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity_model import ( + ComputePoolEntityModel, +) from snowflake.cli._plugins.spcs.compute_pool.manager import ComputePoolManager +from snowflake.cli.api.cli_global_context import get_cli_context +from snowflake.cli.api.commands.decorators import with_project_definition from snowflake.cli.api.commands.flags import ( IfNotExistsOption, OverrideableOption, + ReplaceOption, + entity_argument, identifier_argument, like_option, ) from snowflake.cli.api.commands.snow_typer import SnowTyperFactory from snowflake.cli.api.constants import ObjectType +from snowflake.cli.api.exceptions import NoProjectDefinitionError from snowflake.cli.api.identifiers import FQN -from snowflake.cli.api.output.types import CommandResult, SingleQueryResult +from snowflake.cli.api.output.types import ( + CommandResult, + MessageResult, + SingleQueryResult, +) from snowflake.cli.api.project.util import is_valid_object_name app = SnowTyperFactory( @@ -124,6 +136,7 @@ def create( ), auto_suspend_secs: int = AutoSuspendSecsOption(), comment: Optional[str] = CommentOption(help=_COMMENT_HELP), + replace: bool = ReplaceOption(), if_not_exists: bool = IfNotExistsOption(), **options, ) -> CommandResult: @@ -141,10 +154,50 @@ def create( 
auto_suspend_secs=auto_suspend_secs, comment=comment, if_not_exists=if_not_exists, + replace=replace, ) return SingleQueryResult(cursor) +@app.command("deploy", requires_connection=True) +@with_project_definition() +def deploy( + replace: bool = ReplaceOption( + help="Replace the compute-pool if it already exists." + ), + entity_id: str = entity_argument("compute-pool"), + **options, +): + """ + Deploys a compute pool from the project definition file. + """ + cli_context = get_cli_context() + pd = cli_context.project_definition + compute_pools: Dict[str, ComputePoolEntityModel] = pd.get_entities_by_type( + entity_type="compute-pool" + ) + + if not compute_pools: + raise NoProjectDefinitionError( + project_type="compute-pool", project_root=cli_context.project_root + ) + + if entity_id and entity_id not in compute_pools: + raise UsageError(f"No '{entity_id}' entity in project definition file.") + + if len(compute_pools.keys()) == 1: + entity_id = list(compute_pools.keys())[0] + + if entity_id is None: + raise UsageError( + "Multiple compute-pools found. Please provide entity id for the operation." 
+ ) + + ComputePoolManager().deploy(compute_pool=compute_pools[entity_id], replace=replace) + + return MessageResult(f"Compute pool '{entity_id}' successfully deployed.") + + @app.command("stop-all", requires_connection=True) def stop_all(name: FQN = ComputePoolNameArgument, **options) -> CommandResult: """ diff --git a/src/snowflake/cli/_plugins/spcs/compute_pool/compute_pool_entity.py b/src/snowflake/cli/_plugins/spcs/compute_pool/compute_pool_entity.py new file mode 100644 index 0000000000..d0488d609d --- /dev/null +++ b/src/snowflake/cli/_plugins/spcs/compute_pool/compute_pool_entity.py @@ -0,0 +1,8 @@ +from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity_model import ( + ComputePoolEntityModel, +) +from snowflake.cli.api.entities.common import EntityBase + + +class ComputePoolEntity(EntityBase[ComputePoolEntityModel]): + pass diff --git a/src/snowflake/cli/_plugins/spcs/compute_pool/compute_pool_entity_model.py b/src/snowflake/cli/_plugins/spcs/compute_pool/compute_pool_entity_model.py new file mode 100644 index 0000000000..45854a496c --- /dev/null +++ b/src/snowflake/cli/_plugins/spcs/compute_pool/compute_pool_entity_model.py @@ -0,0 +1,23 @@ +from typing import Literal, Optional + +from pydantic import Field +from snowflake.cli.api.project.schemas.entities.common import EntityModelBase +from snowflake.cli.api.project.schemas.updatable_model import DiscriminatorField + + +class ComputePoolEntityModel(EntityModelBase): + type: Literal["compute-pool"] = DiscriminatorField() # noqa: A003 + min_nodes: Optional[int] = Field(title="Minimum number of nodes", default=None) + max_nodes: Optional[int] = Field(title="Maximum number of nodes", default=None) + instance_family: str = Field(title="Name of the instance family", default=None) + auto_resume: Optional[bool] = Field( + title="The compute pool will automatically resume when a service or job is submitted to it", + default=True, + ) + initially_suspended: Optional[bool] = Field( + title="Starts the 
compute pool in a suspended state", default=False + ) + auto_suspend_seconds: Optional[int] = Field( + title="Number of seconds of inactivity after which you want Snowflake to automatically suspend the compute pool", + default=3600, + ) diff --git a/src/snowflake/cli/_plugins/spcs/compute_pool/manager.py b/src/snowflake/cli/_plugins/spcs/compute_pool/manager.py index 5a4d4e3ae4..5380bc6de4 100644 --- a/src/snowflake/cli/_plugins/spcs/compute_pool/manager.py +++ b/src/snowflake/cli/_plugins/spcs/compute_pool/manager.py @@ -16,12 +16,17 @@ from typing import List, Optional +from snowflake.cli._plugins.object.manager import ObjectManager from snowflake.cli._plugins.spcs.common import ( NoPropertiesProvidedError, handle_object_already_exists, strip_empty_lines, ) +from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity_model import ( + ComputePoolEntityModel, +) from snowflake.cli.api.constants import ObjectType +from snowflake.cli.api.identifiers import FQN from snowflake.cli.api.sql_execution import SqlExecutionMixin from snowflake.connector.cursor import SnowflakeCursor from snowflake.connector.errors import ProgrammingError @@ -39,7 +44,17 @@ def create( auto_suspend_secs: int, comment: Optional[str], if_not_exists: bool, + replace: bool, ) -> SnowflakeCursor: + + if replace: + object_manager = ObjectManager() + object_type = str(ObjectType.COMPUTE_POOL) + entity_id_fqn = FQN.from_string(pool_name) + if object_manager.object_exists(object_type=object_type, fqn=entity_id_fqn): + self.stop(pool_name) + object_manager.drop(object_type=object_type, fqn=entity_id_fqn) + create_statement = "CREATE COMPUTE POOL" if if_not_exists: create_statement = f"{create_statement} IF NOT EXISTS" @@ -58,7 +73,25 @@ def create( try: return self.execute_query(strip_empty_lines(query)) except ProgrammingError as e: - handle_object_already_exists(e, ObjectType.COMPUTE_POOL, pool_name) + handle_object_already_exists( + e, ObjectType.COMPUTE_POOL, pool_name, replace_available=True 
+ ) + + def deploy( + self, compute_pool: ComputePoolEntityModel, replace: bool + ) -> SnowflakeCursor: + return self.create( + pool_name=compute_pool.entity_id, + min_nodes=compute_pool.min_nodes, + max_nodes=compute_pool.max_nodes, + instance_family=compute_pool.instance_family, + auto_resume=compute_pool.auto_resume, + initially_suspended=compute_pool.initially_suspended, + auto_suspend_secs=compute_pool.auto_suspend_seconds, + comment=None, + if_not_exists=False, + replace=replace, + ) def stop(self, pool_name: str) -> SnowflakeCursor: return self.execute_query(f"alter compute pool {pool_name} stop all") diff --git a/src/snowflake/cli/api/project/schemas/entities/entities.py b/src/snowflake/cli/api/project/schemas/entities/entities.py index 008cbc0db2..4260e78227 100644 --- a/src/snowflake/cli/api/project/schemas/entities/entities.py +++ b/src/snowflake/cli/api/project/schemas/entities/entities.py @@ -32,6 +32,12 @@ FunctionEntityModel, ProcedureEntityModel, ) +from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity import ( + ComputePoolEntity, +) +from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity_model import ( + ComputePoolEntityModel, +) from snowflake.cli._plugins.streamlit.streamlit_entity import StreamlitEntity from snowflake.cli._plugins.streamlit.streamlit_entity_model import ( StreamlitEntityModel, @@ -43,6 +49,7 @@ StreamlitEntity, ProcedureEntity, FunctionEntity, + ComputePoolEntity, ] EntityModel = Union[ ApplicationEntityModel, @@ -50,6 +57,7 @@ StreamlitEntityModel, FunctionEntityModel, ProcedureEntityModel, + ComputePoolEntityModel, ] ALL_ENTITIES: List[Entity] = [*get_args(Entity)] diff --git a/tests/__snapshots__/test_help_messages.ambr b/tests/__snapshots__/test_help_messages.ambr index d7ed85a125..fbacf75750 100644 --- a/tests/__snapshots__/test_help_messages.ambr +++ b/tests/__snapshots__/test_help_messages.ambr @@ -5662,6 +5662,9 @@ | --comment TEXT Comment for the | | compute pool. 
| | [default: None] | + | --replace Replace this | + | object if it | + | already exists. | | --if-not-exists Only apply this | | operation if the | | specified object | @@ -5744,6 +5747,100 @@ +------------------------------------------------------------------------------+ + ''' +# --- +# name: test_help_messages[spcs.compute-pool.deploy] + ''' + + Usage: default spcs compute-pool deploy [OPTIONS] [ENTITY_ID] + + Deploys a compute pool from the project definition file. + + +- Arguments ------------------------------------------------------------------+ + | entity_id [ENTITY_ID] ID of compute-pool entity. | + | [default: None] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --replace Replace the compute-pool if it already exists. | + | --project -p TEXT Path where Snowflake project resides. Defaults to | + | current working directory. | + | --env TEXT String in format of key=value. Overrides variables | + | from env section used for templates. | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. 
| Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token that should be used | + | when connecting to Snowflake | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses connection defined with | + | command line parameters, | + | instead of one defined in | + | config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Run Python connector | + | diagnostic test | + | --diag-log-path TEXT Diagnostic report path | + | --diag-allowlist-path TEXT Diagnostic report path to | + | optional allowlist | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log levels info | + | and higher. | + | --debug Displays log entries for log levels debug | + | and higher; debug logs contain additional | + | information. | + | --silent Turns off intermediate output to console. 
| + +------------------------------------------------------------------------------+ + + ''' # --- # name: test_help_messages[spcs.compute-pool.describe] @@ -6603,6 +6700,7 @@ +------------------------------------------------------------------------------+ +- Commands -------------------------------------------------------------------+ | create Creates a new compute pool. | + | deploy Deploys a compute pool from the project definition file. | | describe Provides description of compute pool. | | drop Drops compute pool with given name. | | list Lists all available compute pools. | @@ -11113,6 +11211,7 @@ +------------------------------------------------------------------------------+ +- Commands -------------------------------------------------------------------+ | create Creates a new compute pool. | + | deploy Deploys a compute pool from the project definition file. | | describe Provides description of compute pool. | | drop Drops compute pool with given name. | | list Lists all available compute pools. | diff --git a/tests/spcs/test_compute_pool.py b/tests/spcs/test_compute_pool.py index 10e066f2b0..2bf0204f07 100644 --- a/tests/spcs/test_compute_pool.py +++ b/tests/spcs/test_compute_pool.py @@ -13,7 +13,8 @@ # limitations under the License. 
import json -from unittest.mock import Mock, patch +from textwrap import dedent +from unittest.mock import Mock, call, patch import pytest from click import ClickException @@ -27,6 +28,7 @@ from snowflake.cli.api.constants import ObjectType from snowflake.cli.api.identifiers import FQN from snowflake.cli.api.project.util import to_string_literal +from snowflake.connector import ProgrammingError from snowflake.connector.cursor import SnowflakeCursor from tests.spcs.test_common import SPCS_OBJECT_EXISTS_ERROR @@ -34,10 +36,12 @@ assert_that_result_is_successful_and_executed_successfully, ) - -@patch( +EXECUTE_QUERY = ( "snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query" ) + + +@patch(EXECUTE_QUERY) def test_create(mock_execute_query): pool_name = "test_pool" min_nodes = 2 @@ -58,6 +62,7 @@ def test_create(mock_execute_query): initially_suspended=initially_suspended, auto_suspend_secs=auto_suspend_secs, comment=comment, + replace=False, if_not_exists=False, ) expected_query = " ".join( @@ -99,6 +104,7 @@ def test_create_pool_cli_defaults(mock_create, runner): initially_suspended=False, auto_suspend_secs=3600, comment=None, + replace=False, if_not_exists=False, ) @@ -136,13 +142,12 @@ def test_create_pool_cli(mock_create, runner): initially_suspended=True, auto_suspend_secs=7200, comment=to_string_literal("this is a test"), + replace=False, if_not_exists=True, ) -@patch( - "snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query" -) +@patch(EXECUTE_QUERY) @patch("snowflake.cli._plugins.spcs.compute_pool.manager.handle_object_already_exists") def test_create_compute_pool_already_exists(mock_handle, mock_execute): pool_name = "test_pool" @@ -156,16 +161,18 @@ def test_create_compute_pool_already_exists(mock_handle, mock_execute): initially_suspended=True, auto_suspend_secs=7200, comment=to_string_literal("this is a test"), + replace=False, if_not_exists=False, ) mock_handle.assert_called_once_with( - 
SPCS_OBJECT_EXISTS_ERROR, ObjectType.COMPUTE_POOL, pool_name + SPCS_OBJECT_EXISTS_ERROR, + ObjectType.COMPUTE_POOL, + pool_name, + replace_available=True, ) -@patch( - "snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query" -) +@patch(EXECUTE_QUERY) def test_create_compute_pool_if_not_exists(mock_execute_query): cursor = Mock(spec=SnowflakeCursor) mock_execute_query.return_value = cursor @@ -178,6 +185,7 @@ def test_create_compute_pool_if_not_exists(mock_execute_query): initially_suspended=False, auto_suspend_secs=3600, comment=None, + replace=False, if_not_exists=True, ) expected_query = " ".join( @@ -196,9 +204,183 @@ def test_create_compute_pool_if_not_exists(mock_execute_query): assert result == cursor -@patch( - "snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query" -) +@patch("snowflake.cli._plugins.object.manager.ObjectManager.execute_query") +@patch(EXECUTE_QUERY) +def test_create_compute_pool_replace( + mock_execute_query, mock_execute_query_object_manager, runner +): + compute_pool_name = "test_pool" + + result = runner.invoke( + [ + "spcs", + "compute-pool", + "create", + compute_pool_name, + "--replace", + "--family", + "test_family", + ] + ) + + assert result.exit_code == 0, result.output + expected_query = dedent( + f"""\ + CREATE COMPUTE POOL {compute_pool_name} + MIN_NODES = 1 + MAX_NODES = 1 + INSTANCE_FAMILY = test_family + AUTO_RESUME = True + INITIALLY_SUSPENDED = False + AUTO_SUSPEND_SECS = 3600""" + ) + mock_execute_query.assert_has_calls( + [call(f"alter compute pool {compute_pool_name} stop all"), call(expected_query)] + ) + mock_execute_query_object_manager.assert_has_calls( + [ + call(f"describe compute pool IDENTIFIER('{compute_pool_name}')"), + call(f"drop compute pool IDENTIFIER('{compute_pool_name}')"), + ] + ) + + +@patch(EXECUTE_QUERY) +def test_deploy(mock_execute_query, runner, project_directory): + with project_directory("spcs_compute_pool"): + result = 
runner.invoke(["spcs", "compute-pool", "deploy"]) + + assert result.exit_code == 0, result.output + assert ( + "Compute pool 'test_compute_pool' successfully deployed." in result.output + ) + expected_query = dedent( + """\ + CREATE COMPUTE POOL test_compute_pool + MIN_NODES = 1 + MAX_NODES = 2 + INSTANCE_FAMILY = CPU_X64_XS + AUTO_RESUME = True + INITIALLY_SUSPENDED = True + AUTO_SUSPEND_SECS = 60""" + ) + mock_execute_query.assert_called_once_with(expected_query) + + +@patch("snowflake.cli._plugins.object.manager.ObjectManager.execute_query") +@patch(EXECUTE_QUERY) +def test_deploy_replace( + mock_execute_query, mock_execute_query_object_manager, runner, project_directory +): + compute_pool_name = "test_compute_pool" + + with project_directory("spcs_compute_pool"): + result = runner.invoke(["spcs", "compute-pool", "deploy", "--replace"]) + + assert result.exit_code == 0, result.output + assert ( + f"Compute pool '{compute_pool_name}' successfully deployed." + in result.output + ) + expected_query = dedent( + f"""\ + CREATE COMPUTE POOL {compute_pool_name} + MIN_NODES = 1 + MAX_NODES = 2 + INSTANCE_FAMILY = CPU_X64_XS + AUTO_RESUME = True + INITIALLY_SUSPENDED = True + AUTO_SUSPEND_SECS = 60""" + ) + mock_execute_query.assert_has_calls( + [ + call(f"alter compute pool {compute_pool_name} stop all"), + call(expected_query), + ] + ) + mock_execute_query_object_manager.assert_has_calls( + [ + call(f"describe compute pool IDENTIFIER('{compute_pool_name}')"), + call(f"drop compute pool IDENTIFIER('{compute_pool_name}')"), + ] + ) + + +@patch(EXECUTE_QUERY) +def test_deploy_compute_pool_already_exists( + mock_execute_query, runner, project_directory +): + mock_execute_query.side_effect = ProgrammingError( + errno=2002, msg="Object 'test_compute_pool' already exists." 
+ ) + + with project_directory("spcs_compute_pool"): + result = runner.invoke(["spcs", "compute-pool", "deploy"]) + + assert result.exit_code == 1, result.output + assert ( + "Compute-pool TEST_COMPUTE_POOL already exists. Use --replace flag to update" + in result.output + ) + + +def test_deploy_no_compute_pools(runner, project_directory): + with project_directory("empty_project"): + result = runner.invoke(["spcs", "compute-pool", "deploy"]) + + assert result.exit_code == 1, result.output + assert "No compute-pool project definition found in" in result.output + + +def test_deploy_not_existing_entity_id(runner, project_directory): + with project_directory("spcs_compute_pool"): + result = runner.invoke( + ["spcs", "compute-pool", "deploy", "not-existing-entity-id"] + ) + + assert result.exit_code == 2, result.output + assert ( + "No 'not-existing-entity-id' entity in project definition file." + in result.output + ) + + +@patch(EXECUTE_QUERY) +def test_deploy_multiple_compute_pools_with_entity_id( + mock_execute_query, runner, project_directory +): + with project_directory("spcs_multiple_compute_pools"): + result = runner.invoke(["spcs", "compute-pool", "deploy", "test_compute_pool"]) + + assert result.exit_code == 0, result.output + assert ( + "Compute pool 'test_compute_pool' successfully deployed." in result.output + ) + expected_query = dedent( + """\ + CREATE COMPUTE POOL test_compute_pool + MIN_NODES = 1 + MAX_NODES = 2 + INSTANCE_FAMILY = CPU_X64_XS + AUTO_RESUME = True + INITIALLY_SUSPENDED = True + AUTO_SUSPEND_SECS = 60""" + ) + mock_execute_query.assert_called_once_with(expected_query) + + +def test_deploy_multiple_compute_pools(runner, project_directory): + with project_directory("spcs_multiple_compute_pools"): + result = runner.invoke(["spcs", "compute-pool", "deploy"]) + + assert result.exit_code == 2, result.output + assert ( + "Multiple compute-pools found. Please provide entity id for the operation." 
+ in result.output + ) + + +@patch(EXECUTE_QUERY) def test_stop(mock_execute_query): pool_name = "test_pool" cursor = Mock(spec=SnowflakeCursor) @@ -209,9 +391,7 @@ def test_stop(mock_execute_query): assert result == cursor -@patch( - "snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query" -) +@patch(EXECUTE_QUERY) def test_suspend(mock_execute_query): pool_name = "test_pool" cursor = Mock(spec=SnowflakeCursor) @@ -246,9 +426,7 @@ def test_suspend_cli(mock_suspend, mock_cursor, runner): assert result_json_parsed == {"status": "Statement executed successfully."} -@patch( - "snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query" -) +@patch(EXECUTE_QUERY) def test_resume(mock_execute_query): pool_name = "test_pool" cursor = Mock(spec=SnowflakeCursor) @@ -300,9 +478,7 @@ def test_compute_pool_name_callback_invalid(mock_is_valid): assert "is not a valid compute pool name." in e.value.message -@patch( - "snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query" -) +@patch(EXECUTE_QUERY) def test_set_property(mock_execute_query): pool_name = "test_pool" min_nodes = 2 @@ -401,9 +577,7 @@ def test_set_property_no_properties_cli(mock_set, runner): ) -@patch( - "snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query" -) +@patch(EXECUTE_QUERY) def test_unset_property(mock_execute_query): pool_name = "test_pool" cursor = Mock(spec=SnowflakeCursor) @@ -476,9 +650,7 @@ def test_unset_property_with_args(runner): assert "Got unexpected extra argument" in result.output -@patch( - "snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query" -) +@patch(EXECUTE_QUERY) def test_status(mock_execute_query): pool_name = "test_pool" cursor = Mock(spec=SnowflakeCursor) diff --git a/tests/test_data/projects/spcs_compute_pool/snowflake.yml b/tests/test_data/projects/spcs_compute_pool/snowflake.yml new file mode 100644 index 0000000000..2a5418de29 --- /dev/null +++ 
b/tests/test_data/projects/spcs_compute_pool/snowflake.yml @@ -0,0 +1,13 @@ +definition_version: "2" + +entities: + test_compute_pool: + type: compute-pool + identifier: + name: test_compute_pool + min_nodes: 1 + max_nodes: 2 + instance_family: CPU_X64_XS + auto_resume: true + initially_suspended: true + auto_suspend_seconds: 60 diff --git a/tests/test_data/projects/spcs_multiple_compute_pools/snowflake.yml b/tests/test_data/projects/spcs_multiple_compute_pools/snowflake.yml new file mode 100644 index 0000000000..c4b1e8f456 --- /dev/null +++ b/tests/test_data/projects/spcs_multiple_compute_pools/snowflake.yml @@ -0,0 +1,33 @@ +definition_version: "2" + +entities: + test_compute_pool: + type: compute-pool + identifier: + name: test_compute_pool + min_nodes: 1 + max_nodes: 2 + instance_family: CPU_X64_XS + auto_resume: true + initially_suspended: true + auto_suspend_seconds: 60 + test_compute_pool_2: + type: compute-pool + identifier: + name: test_compute_pool_2 + min_nodes: 1 + max_nodes: 2 + instance_family: CPU_X64_XS + auto_resume: true + initially_suspended: true + auto_suspend_seconds: 60 + test_compute_pool_3: + type: compute-pool + identifier: + name: test_compute_pool + min_nodes: 1 + max_nodes: 2 + instance_family: CPU_X64_XS + auto_resume: true + initially_suspended: true + auto_suspend_seconds: 60 diff --git a/tests_integration/spcs/test_compute_pool.py b/tests_integration/spcs/test_compute_pool.py index 6f4c8494ac..f9228d39cc 100644 --- a/tests_integration/spcs/test_compute_pool.py +++ b/tests_integration/spcs/test_compute_pool.py @@ -40,6 +40,42 @@ def test_compute_pool(_test_steps: Tuple[ComputePoolTestSteps, str]): test_steps.list_should_not_return_compute_pool(compute_pool_name) +@pytest.mark.integration +def test_compute_pool_deploy( + _test_steps: Tuple[ComputePoolTestSteps, str], + project_directory, + alter_snowflake_yml, +): + + test_steps, compute_pool_name = _test_steps + + with project_directory("spcs_compute_pool"): + alter_snowflake_yml( + 
"snowflake.yml", + "entities", + { + compute_pool_name: { + "type": "compute-pool", + "identifier": { + "name": compute_pool_name, + }, + "min_nodes": 1, + "max_nodes": 1, + "instance_family": "CPU_X64_XS", + "auto_resume": True, + "initially_suspended": True, + "auto_suspend_seconds": 60, + } + }, + ) + test_steps.deploy_compute_pool(compute_pool_name) + test_steps.list_should_return_compute_pool(compute_pool_name) + test_steps.second_deploy_should_fail() + test_steps.deploy_compute_pool_with_replace(compute_pool_name) + test_steps.drop_compute_pool(compute_pool_name) + test_steps.list_should_not_return_compute_pool(compute_pool_name) + + @pytest.fixture def _test_setup(runner, snowflake_session): compute_pool_test_setup = ComputePoolTestSetup( diff --git a/tests_integration/spcs/testing_utils/compute_pool_utils.py b/tests_integration/spcs/testing_utils/compute_pool_utils.py index 3bd0432c4d..9cc3df8e74 100644 --- a/tests_integration/spcs/testing_utils/compute_pool_utils.py +++ b/tests_integration/spcs/testing_utils/compute_pool_utils.py @@ -18,12 +18,14 @@ import pytest from snowflake.connector import SnowflakeConnection +from snowflake.cli.api.output.types import CommandResult from tests_integration.conftest import SnowCLIRunner from tests_integration.test_utils import contains_row_with, not_contains_row_with from tests_integration.testing_utils.assertions.test_result_assertions import ( assert_that_result_is_successful_and_executed_successfully, assert_that_result_is_successful_and_output_json_contains, assert_that_result_is_successful_and_output_json_equals, + assert_that_result_failed_with_message_containing, ) @@ -63,6 +65,34 @@ def create_compute_pool(self, compute_pool_name: str) -> None: in result.json["status"] # type: ignore ) + def deploy_compute_pool(self, compute_pool_name: str) -> None: + result = self._deploy_compute_pool(replace=False) + assert_that_result_is_successful_and_output_json_contains( + result, + {"message": f"Compute pool 
'{compute_pool_name}' successfully deployed."}, + ) + + def deploy_compute_pool_with_replace(self, compute_pool_name: str) -> None: + result = self._deploy_compute_pool(replace=True) + assert_that_result_is_successful_and_output_json_contains( + result, + {"message": f"Compute pool '{compute_pool_name}' successfully deployed."}, + ) + + def second_deploy_should_fail(self) -> None: + result = self._deploy_compute_pool(replace=False) + assert_that_result_failed_with_message_containing(result, "already exists") + + def _deploy_compute_pool(self, replace: bool) -> CommandResult: + params = [ + "spcs", + "compute-pool", + "deploy", + ] + if replace: + params.append("--replace") + return self._setup.runner.invoke_with_connection_json(params) + def list_should_return_compute_pool(self, compute_pool_name) -> None: result = self._execute_list() assert_that_result_is_successful_and_output_json_contains( diff --git a/tests_integration/test_data/projects/spcs_compute_pool/snowflake.yml b/tests_integration/test_data/projects/spcs_compute_pool/snowflake.yml new file mode 100644 index 0000000000..afda15b73e --- /dev/null +++ b/tests_integration/test_data/projects/spcs_compute_pool/snowflake.yml @@ -0,0 +1,13 @@ +definition_version: "2" + +entities: + test_compute_pool: + type: compute-pool + identifier: + name: test_compute_pool + min_nodes: 1 + max_nodes: 1 + instance_family: CPU_X64_XS + auto_resume: true + initially_suspended: true + auto_suspend_seconds: 60