diff --git a/.github/workflows/test_fork.yaml b/.github/workflows/test_fork.yaml index 14f051d50b..4be72291a2 100644 --- a/.github/workflows/test_fork.yaml +++ b/.github/workflows/test_fork.yaml @@ -66,6 +66,7 @@ jobs: SNOWFLAKE_CONNECTIONS_INTEGRATION_AUTHENTICATOR: SNOWFLAKE_JWT SNOWFLAKE_CONNECTIONS_INTEGRATION_USER: ${{ secrets.SNOWFLAKE_USER }} SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }} + SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE: ${{ secrets.SNOWFLAKE_DATABASE }} SNOWFLAKE_CONNECTIONS_INTEGRATION_PRIVATE_KEY_PATH: ${{ env.PRIVATE_KEY_PATH }} run: python -m hatch run ${{ inputs.hatch-run }} diff --git a/.github/workflows/test_trusted.yaml b/.github/workflows/test_trusted.yaml index 7357abd88e..36503d921d 100644 --- a/.github/workflows/test_trusted.yaml +++ b/.github/workflows/test_trusted.yaml @@ -66,5 +66,6 @@ jobs: SNOWFLAKE_CONNECTIONS_INTEGRATION_AUTHENTICATOR: SNOWFLAKE_JWT SNOWFLAKE_CONNECTIONS_INTEGRATION_USER: ${{ secrets.SNOWFLAKE_USER }} SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }} + SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE: ${{ secrets.SNOWFLAKE_DATABASE }} SNOWFLAKE_CONNECTIONS_INTEGRATION_PRIVATE_KEY_PATH: ${{ env.PRIVATE_KEY_PATH }} run: python -m hatch run ${{ inputs.hatch-run }} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5474ab3b92..224ce20bcd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -69,40 +69,45 @@ or by running `pytest` inside activated environment. Every integration test should have `integration` mark. By default, integration tests are not execute when running `pytest`. To execute only integration tests run `hatch run integration:test` or `pytest -m integration` inside environment. -### Connection parameters in `config.toml` -Add the following connection to your `config.toml` +### User setup -```toml -[connections.integration] -host = -account = -user = -password = -``` +Integration tests require environment variables to be set up. 
Parameters must use the following format: -### Connection parameters in environment parameters +``SNOWFLAKE_CONNECTIONS_INTEGRATION_=`` -Parameters must use the following format: +where ```` is the name of the key. The following environment variables are required: -``SNOWFLAKE_CONNECTIONS_INTEGRATION_=`` +- `SNOWFLAKE_CONNECTIONS_INTEGRATION_HOST` +- `SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT` +- `SNOWFLAKE_CONNECTIONS_INTEGRATION_USER` +- `SNOWFLAKE_CONNECTIONS_INTEGRATION_PASSWORD` or `SNOWFLAKE_CONNECTIONS_INTEGRATION_PRIVATE_KEY_PATH` (if using private key authentication `SNOWFLAKE_CONNECTIONS_INTEGRATION_AUTHENTICATOR=SNOWFLAKE_JWT` should be set) +- `SNOWFLAKE_CONNECTIONS_INTEGRATION_ROLE` +- `SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE` +- `SNOWFLAKE_CONNECTIONS_INTEGRATION_WAREHOUSE` -where ```` is the name of the key +### Integration account setup script -For example: SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT="my-account" +To set up an account for integration tests, run the following script with `ACCOUNTADMIN` role: -List of required parameter keys: -- host -- account -- user -- password +```bash +snow sql \ + -f tests_integration/scripts/integration_account_setup.sql \ + -D "user=${SNOWFLAKE_CONNECTIONS_INTEGRATION_USER}" \ + -D "role=${SNOWFLAKE_CONNECTIONS_INTEGRATION_ROLE}" \ + -D "warehouse=${SNOWFLAKE_CONNECTIONS_INTEGRATION_WAREHOUSE}" \ + -D "main_database=${SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE}"\ + -c ``` -### User setup +Note: Before running the script, set up your environment variables. 
+ +### Build and push Docker images -Run the script with ACCOUNTADMIN role +To build and push all required Docker images, run the following script: ```bash -tests_integration/scripts/integration_account_setup.sql +./tests_integration/spcs/docker/build_and_push_all.sh ``` ## Remote debugging with PyCharm or IntelliJ diff --git a/scripts/cleanup.py b/scripts/cleanup.py index d08ed975a9..6363aaabeb 100644 --- a/scripts/cleanup.py +++ b/scripts/cleanup.py @@ -50,7 +50,7 @@ def remove_resources(single: str, plural: str, known_instances: t.List[str], rol if __name__ == "__main__": - role = "INTEGRATION_TESTS" + role = os.getenv("SNOWFLAKE_CONNECTIONS_INTEGRATION_ROLE", "INTEGRATION_TESTS") config = { "authenticator": "SNOWFLAKE_JWT", "account": os.getenv("SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT"), @@ -64,7 +64,7 @@ def remove_resources(single: str, plural: str, known_instances: t.List[str], rol update_connection_details_with_private_key(config) session = Session.builder.configs(config).create() - session.use_role("INTEGRATION_TESTS") + session.use_role(role) known_objects: t.Dict[t.Tuple[str, str], t.List[str]] = { ("database", "databases"): [ diff --git a/tests_integration/config/connection_configs.toml b/tests_integration/config/connection_configs.toml index 96cff333d0..e3a8cd4204 100644 --- a/tests_integration/config/connection_configs.toml +++ b/tests_integration/config/connection_configs.toml @@ -16,4 +16,3 @@ [connections.integration] authenticator = "SNOWFLAKE_JWT" schema = "public" -role = "INTEGRATION_TESTS" diff --git a/tests_integration/config/world_readable.toml b/tests_integration/config/world_readable.toml index 8ffa81e9ef..11f7cb7f0d 100644 --- a/tests_integration/config/world_readable.toml +++ b/tests_integration/config/world_readable.toml @@ -18,4 +18,3 @@ [connections.default] [connections.integration] schema = "public" -role = "INTEGRATION_TESTS" diff --git a/tests_integration/scripts/integration_account_setup.sql 
b/tests_integration/scripts/integration_account_setup.sql index e4384ad817..cec032fdeb 100644 --- a/tests_integration/scripts/integration_account_setup.sql +++ b/tests_integration/scripts/integration_account_setup.sql @@ -13,98 +13,107 @@ See the License for the specific language governing permissions and limitations under the License. */ - -SET INT_TEST_USER = 'SNOWCLI_TEST'; -CREATE USER IF NOT EXISTS IDENTIFIER($INT_TEST_USER); +CREATE USER IF NOT EXISTS IDENTIFIER('&{ user }'); -- BASE SETUP -CREATE ROLE IF NOT EXISTS INTEGRATION_TESTS; -GRANT CREATE ROLE ON ACCOUNT TO ROLE INTEGRATION_TESTS; -GRANT CREATE DATABASE ON ACCOUNT TO ROLE INTEGRATION_TESTS; -GRANT CREATE COMPUTE POOL ON ACCOUNT TO ROLE INTEGRATION_TESTS; -GRANT BIND SERVICE ENDPOINT ON ACCOUNT TO ROLE INTEGRATION_TESTS; -GRANT CREATE APPLICATION PACKAGE ON ACCOUNT TO ROLE INTEGRATION_TESTS; -GRANT CREATE APPLICATION ON ACCOUNT TO ROLE INTEGRATION_TESTS; -GRANT CREATE DATABASE ON ACCOUNT TO ROLE INTEGRATION_TESTS WITH GRANT OPTION; -GRANT CREATE WAREHOUSE ON ACCOUNT TO ROLE INTEGRATION_TESTS; -GRANT ROLE INTEGRATION_TESTS TO USER IDENTIFIER($INT_TEST_USER); +CREATE ROLE IF NOT EXISTS &{ role }; +GRANT CREATE ROLE ON ACCOUNT TO ROLE &{ role }; +GRANT CREATE DATABASE ON ACCOUNT TO ROLE &{ role }; +GRANT CREATE COMPUTE POOL ON ACCOUNT TO ROLE &{ role }; +GRANT BIND SERVICE ENDPOINT ON ACCOUNT TO ROLE &{ role }; +GRANT CREATE APPLICATION PACKAGE ON ACCOUNT TO ROLE &{ role }; +GRANT CREATE APPLICATION ON ACCOUNT TO ROLE &{ role }; +GRANT CREATE DATABASE ON ACCOUNT TO ROLE &{ role } WITH GRANT OPTION; +GRANT CREATE WAREHOUSE ON ACCOUNT TO ROLE &{ role }; +GRANT ROLE &{ role } TO USER IDENTIFIER('&{ user }'); -- WAREHOUSE SETUP -CREATE WAREHOUSE IF NOT EXISTS XSMALL WAREHOUSE_SIZE=XSMALL; -GRANT ALL ON WAREHOUSE XSMALL TO ROLE INTEGRATION_TESTS; +CREATE WAREHOUSE IF NOT EXISTS &{ warehouse } WAREHOUSE_SIZE=XSMALL; +GRANT ALL ON WAREHOUSE &{ warehouse } TO ROLE &{ role }; --- DATABASES SETUP -CREATE DATABASE 
IF NOT EXISTS SNOWCLI_DB; -GRANT ALL ON DATABASE SNOWCLI_DB TO ROLE INTEGRATION_TESTS; -GRANT ALL ON SCHEMA SNOWCLI_DB.PUBLIC TO ROLE INTEGRATION_TESTS; +-- MAIN DATABASES SETUP +CREATE DATABASE IF NOT EXISTS &{ main_database }; +GRANT ALL ON DATABASE &{ main_database } TO ROLE &{ role }; +GRANT ALL ON SCHEMA &{ main_database }.PUBLIC TO ROLE &{ role }; +USE DATABASE &{ main_database }; -- STAGES SETUP -CREATE STAGE IF NOT EXISTS SNOWCLI_DB.PUBLIC.SNOWCLI_STAGE DIRECTORY = ( ENABLE = TRUE ); +CREATE STAGE IF NOT EXISTS &{ main_database }.PUBLIC.SNOWCLI_STAGE DIRECTORY = ( ENABLE = TRUE ); -- CONTAINERS SETUP -CREATE OR REPLACE IMAGE REPOSITORY SNOWCLI_DB.PUBLIC.SNOWCLI_REPOSITORY; -GRANT READ, WRITE ON IMAGE REPOSITORY SNOWCLI_DB.PUBLIC.SNOWCLI_REPOSITORY TO ROLE INTEGRATION_TESTS; +CREATE IMAGE REPOSITORY IF NOT EXISTS &{ main_database }.PUBLIC.SNOWCLI_REPOSITORY; +GRANT READ, WRITE ON IMAGE REPOSITORY &{ main_database }.PUBLIC.SNOWCLI_REPOSITORY TO ROLE &{ role }; -CREATE COMPUTE POOL IF NOT EXISTS SNOWCLI_COMPUTE_POOL - MIN_NODES = 1 - MAX_NODES = 1 - INSTANCE_FAMILY = CPU_X64_XS; +CREATE COMPUTE POOL IF NOT EXISTS snowcli_compute_pool + MIN_NODES = 1 + MAX_NODES = 1 + INSTANCE_FAMILY = CPU_X64_XS; -GRANT USAGE ON COMPUTE POOL SNOWCLI_COMPUTE_POOL TO ROLE INTEGRATION_TESTS; -GRANT MONITOR ON COMPUTE POOL SNOWCLI_COMPUTE_POOL TO ROLE INTEGRATION_TESTS; +GRANT USAGE ON COMPUTE POOL snowcli_compute_pool TO ROLE &{ role }; +GRANT MONITOR ON COMPUTE POOL snowcli_compute_pool TO ROLE &{ role }; -ALTER COMPUTE POOL SNOWCLI_COMPUTE_POOL SUSPEND; +ALTER COMPUTE POOL snowcli_compute_pool SUSPEND; -- EXTERNAL ACCESS INTEGRATION -CREATE OR REPLACE NETWORK RULE snowflake_docs_network_rule +CREATE NETWORK RULE IF NOT EXISTS snowflake_docs_network_rule MODE = EGRESS TYPE = HOST_PORT VALUE_LIST = ('docs.snowflake.com'); -CREATE OR REPLACE SECRET test_secret +CREATE SECRET IF NOT EXISTS test_secret TYPE = GENERIC_STRING --- SECRET_STRING = ''; -- provide password -GRANT READ ON 
SECRET test_secret TO ROLE integration_tests; + SECRET_STRING = 'test'; -- provide password +GRANT READ ON SECRET test_secret TO ROLE &{ role }; + +CREATE EXTERNAL ACCESS INTEGRATION IF NOT EXISTS snowflake_docs_access_integration + ALLOWED_NETWORK_RULES = (snowflake_docs_network_rule) + ALLOWED_AUTHENTICATION_SECRETS = (test_secret) + ENABLED = true; +GRANT USAGE ON INTEGRATION snowflake_docs_access_integration TO ROLE &{ role }; -CREATE OR REPLACE EXTERNAL ACCESS INTEGRATION snowflake_docs_access_integration +CREATE EXTERNAL ACCESS INTEGRATION IF NOT EXISTS cli_test_integration ALLOWED_NETWORK_RULES = (snowflake_docs_network_rule) ALLOWED_AUTHENTICATION_SECRETS = (test_secret) ENABLED = true; -GRANT USAGE ON INTEGRATION snowflake_docs_access_integration TO ROLE integration_tests; +GRANT USAGE ON INTEGRATION cli_test_integration TO ROLE &{ role }; -- API INTEGRATION FOR SNOWGIT -CREATE API INTEGRATION snowcli_testing_repo_api_integration -API_PROVIDER = git_https_api -API_ALLOWED_PREFIXES = ('https://github.com/snowflakedb/') -ALLOWED_AUTHENTICATION_SECRETS = () -ENABLED = true; -GRANT USAGE ON INTEGRATION snowcli_testing_repo_api_integration TO ROLE INTEGRATION_TESTS; - --- Notebooks setup -CREATE DATABASE NOTEBOOK; - --- CORTEX SEARCH SETUP UNCOMMENT THIS WHEN ENABLING CORTEX INTEGRATION TESTS --- CREATE TABLE transcripts ( --- transcript_text VARCHAR, --- region VARCHAR, --- agent_id VARCHAR --- ); --- --- INSERT INTO transcripts VALUES('Ah, I see you have the machine that goes "ping!". This is my favourite.', 'Meaning of Life', '01'), --- ('First shalt thou take out the Holy Pin. Then shalt thou count to three, no more, no less.', 'Holy Grail', '02'), --- ('And the beast shall be huge and black, and the eyes thereof red with the blood of living creatures', 'Life of Brian', '03'), --- ('This parrot is no more! It has ceased to be! 
It`s expired and gone to meet its maker!', 'Flying Circus', '04'); --- --- CREATE OR REPLACE CORTEX SEARCH SERVICE test_service --- ON transcript_text --- ATTRIBUTES region --- WAREHOUSE = mywh --- TARGET_LAG = '1 day' --- AS ( --- SELECT --- transcript_text, --- region, --- agent_id --- FROM support_transcripts --- ); --- END OF CORTEX SETUP - THIS LINE CAN BE DELETED AFTER UNCOMMENTING ABOVE CODE WHEN ENABLING CORTEX TESTS +CREATE API INTEGRATION IF NOT EXISTS snowcli_testing_repo_api_integration + API_PROVIDER = git_https_api + API_ALLOWED_PREFIXES = ('https://github.com/snowflakedb/') + ALLOWED_AUTHENTICATION_SECRETS = () + ENABLED = true; +GRANT USAGE ON INTEGRATION snowcli_testing_repo_api_integration TO ROLE &{ role }; + +-- NOTEBOOKS SETUP +CREATE DATABASE IF NOT EXISTS NOTEBOOK; + +-- CORTEX SEARCH SETUP +CREATE TABLE IF NOT EXISTS transcripts ( + transcript_text VARCHAR, + region VARCHAR, + agent_id VARCHAR +); + +-- INSERT IF NOT EXISTS +MERGE INTO transcripts AS t USING ( + VALUES('Ah, I see you have the machine that goes "ping!". This is my favourite.', 'Meaning of Life', '01'), + ('First shalt thou take out the Holy Pin. Then shalt thou count to three, no more, no less.', 'Holy Grail', '02'), + ('And the beast shall be huge and black, and the eyes thereof red with the blood of living creatures', 'Life of Brian', '03'), + ('This parrot is no more! It has ceased to be! 
It`s expired and gone to meet its maker!', 'Flying Circus', '04') + ) AS s (c1, c2, c3) ON t.agent_id = s.c3 + WHEN NOT MATCHED THEN + INSERT (transcript_text, region, agent_id) VALUES (s.c1, s.c2, s.c3); + +CREATE CORTEX SEARCH SERVICE IF NOT EXISTS test_service + ON transcript_text + ATTRIBUTES region + WAREHOUSE = &{ warehouse } + TARGET_LAG = '1 day' + AS ( + SELECT + transcript_text, + region, + agent_id + FROM transcripts +); diff --git a/tests_integration/spcs/docker/build_and_push_all.sh b/tests_integration/spcs/docker/build_and_push_all.sh new file mode 100755 index 0000000000..9deb036a1a --- /dev/null +++ b/tests_integration/spcs/docker/build_and_push_all.sh @@ -0,0 +1,6 @@ +SCRIPT_DIR=$(dirname "$0") + +cd "$SCRIPT_DIR/echo_service" +source "build_and_push.sh" +cd "../test_counter" +source "build_and_push.sh" diff --git a/tests_integration/spcs/docker/echo_service/bootstrap_containers_setup.sh b/tests_integration/spcs/docker/echo_service/bootstrap_containers_setup.sh deleted file mode 100644 index 85f20ba779..0000000000 --- a/tests_integration/spcs/docker/echo_service/bootstrap_containers_setup.sh +++ /dev/null @@ -1,6 +0,0 @@ -set -e -export SF_REGISTRY="$(snow spcs image-registry url -c integration)" -echo "Using registry: ${SF_REGISTRY}" -docker build --platform linux/amd64 -t "${SF_REGISTRY}/snowcli_db/public/snowcli_repository/snowpark_test_echo:1" . 
-snow spcs image-registry token --format=json -c integration | docker login "${SF_REGISTRY}/snowcli_db/public/snowcli_repository" -u 0sessiontoken --password-stdin -docker push "${SF_REGISTRY}/snowcli_db/public/snowcli_repository/snowpark_test_echo:1" diff --git a/tests_integration/spcs/docker/echo_service/build_and_push.sh b/tests_integration/spcs/docker/echo_service/build_and_push.sh new file mode 100755 index 0000000000..a449137317 --- /dev/null +++ b/tests_integration/spcs/docker/echo_service/build_and_push.sh @@ -0,0 +1,8 @@ +set -e +export SF_REGISTRY="$(snow spcs image-registry url -c integration)" +DATABASE=$(echo "${SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE}" | tr '[:upper:]' '[:lower:]') + +echo "Using registry: ${SF_REGISTRY}" +docker build --platform linux/amd64 -t "${SF_REGISTRY}/${DATABASE}/public/snowcli_repository/snowpark_test_echo:1" . +snow spcs image-registry token --format=json -c integration | docker login "${SF_REGISTRY}/${DATABASE}/public/snowcli_repository" -u 0sessiontoken --password-stdin +docker push "${SF_REGISTRY}/${DATABASE}/public/snowcli_repository/snowpark_test_echo:1" diff --git a/tests_integration/spcs/docker/test_counter/build_and_push.sh b/tests_integration/spcs/docker/test_counter/build_and_push.sh new file mode 100755 index 0000000000..c5c2269223 --- /dev/null +++ b/tests_integration/spcs/docker/test_counter/build_and_push.sh @@ -0,0 +1,8 @@ +set -e +export SF_REGISTRY="$(snow spcs image-registry url -c integration)" +DATABASE=$(echo "${SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE}" | tr '[:upper:]' '[:lower:]') + +echo "Using registry: ${SF_REGISTRY}" +docker build --platform linux/amd64 -t "${SF_REGISTRY}/${DATABASE}/public/snowcli_repository/test_counter" . 
+snow spcs image-registry token --format=json -c integration | docker login "${SF_REGISTRY}/${DATABASE}/public/snowcli_repository" -u 0sessiontoken --password-stdin +docker push "${SF_REGISTRY}/${DATABASE}/public/snowcli_repository/test_counter" diff --git a/tests_integration/spcs/docker/test_counter/build_image.sh b/tests_integration/spcs/docker/test_counter/build_image.sh deleted file mode 100644 index 318dba96a6..0000000000 --- a/tests_integration/spcs/docker/test_counter/build_image.sh +++ /dev/null @@ -1,6 +0,0 @@ -set -e -export SF_REGISTRY="$(snow spcs image-registry url -c integration)" -echo "Using registry: ${SF_REGISTRY}" -docker build --platform linux/amd64 -t "${SF_REGISTRY}/snowcli_db/public/snowcli_repository/test_counter" . -snow spcs image-registry token --format=json -c integration | docker login "${SF_REGISTRY}/snowcli_db/public/snowcli_repository" -u 0sessiontoken --password-stdin -docker push "${SF_REGISTRY}/snowcli_db/public/snowcli_repository/test_counter" diff --git a/tests_integration/spcs/test_image_repository.py b/tests_integration/spcs/test_image_repository.py index 046a127e10..be1da3febd 100644 --- a/tests_integration/spcs/test_image_repository.py +++ b/tests_integration/spcs/test_image_repository.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import os import pytest from snowflake.cli.api.project.util import escape_like_pattern @@ -18,14 +19,16 @@ from tests_integration.test_utils import contains_row_with, row_from_snowflake_session from tests_integration.testing_utils import ObjectNameProvider -INTEGRATION_DATABASE = "SNOWCLI_DB" +INTEGRATION_DATABASE = os.environ.get( + "SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE", "SNOWCLI_DB" +) INTEGRATION_SCHEMA = "PUBLIC" INTEGRATION_REPOSITORY = "snowcli_repository" @pytest.mark.integration def test_list_images_tags(runner): - # test assumes the testing environment has been set up with /SNOWCLI_DB/PUBLIC/snowcli_repository/snowpark_test_echo:1 + # test assumes the testing environment has been set up with //PUBLIC/snowcli_repository/snowpark_test_echo:1 _list_images(runner) _list_tags(runner) diff --git a/tests_integration/spcs/test_services.py b/tests_integration/spcs/test_services.py index 01c55c2df8..5c86f6b871 100644 --- a/tests_integration/spcs/test_services.py +++ b/tests_integration/spcs/test_services.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- import uuid from typing import Tuple diff --git a/tests_integration/spcs/testing_utils/spcs_services_utils.py b/tests_integration/spcs/testing_utils/spcs_services_utils.py index 28aea80096..3d233561fe 100644 --- a/tests_integration/spcs/testing_utils/spcs_services_utils.py +++ b/tests_integration/spcs/testing_utils/spcs_services_utils.py @@ -14,14 +14,14 @@ import json import math +import os import time from textwrap import dedent -from typing import Union import pytest from snowflake.connector import SnowflakeConnection -from tests_integration.conftest import SnowCLIRunner, CommandResult +from tests_integration.conftest import SnowCLIRunner from tests_integration.test_utils import contains_row_with, not_contains_row_with from tests_integration.testing_utils.assertions.test_result_assertions import ( assert_that_result_is_successful_and_executed_successfully, @@ -44,7 +44,9 @@ def __init__( class SnowparkServicesTestSteps: compute_pool = "snowcli_compute_pool" - database = "snowcli_db" + database = os.environ.get( + "SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE", "SNOWCLI_DB" + ) schema = "public" container_name = "hello-world" @@ -235,12 +237,11 @@ def upgrade_service_should_change_spec(self, service_name: str): ) describe_result = self._execute_describe(service_name) - with open(spec_path, "r") as f: - assert describe_result.exit_code == 0, describe_result.output - # do not assert direct equality because the spec field in output of DESCRIBE SERVICE has some extra info - assert ( - new_container_name in describe_result.json[0]["spec"] - ), f"Container name '{new_container_name}' from spec_upgrade.yml not found in output of DESCRIBE SERVICE." 
+ assert describe_result.exit_code == 0, describe_result.output + # do not assert direct equality because the spec field in output of DESCRIBE SERVICE has some extra info + assert ( + new_container_name in describe_result.json[0]["spec"] + ), f"Container name '{new_container_name}' from spec_upgrade.yml not found in output of DESCRIBE SERVICE." def list_endpoints_should_show_endpoint(self, service_name: str): result = self._setup.runner.invoke_with_connection_json( @@ -303,7 +304,7 @@ def _execute_logs( ) def _get_spec_path(self, spec_file_name) -> str: - return f"{self._setup.test_root_path}/spcs/spec/{spec_file_name}" + return self._setup.test_root_path / "spcs" / "spec" / spec_file_name def _get_fqn(self, service_name) -> str: return f"{self.database}.{self.schema}.{service_name}" diff --git a/tests_integration/test_object.py b/tests_integration/test_object.py index b5d9be52c1..1a8bc005d7 100644 --- a/tests_integration/test_object.py +++ b/tests_integration/test_object.py @@ -11,6 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import os +from unittest import mock import pytest @@ -309,8 +311,11 @@ def test_create_error_schema_not_exist(runner, test_database): @pytest.mark.integration +@mock.patch.dict(os.environ, os.environ, clear=True) def test_create_error_undefined_database(runner): # undefined database + del os.environ["SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE"] + result = runner.invoke_with_connection( ["object", "create", "schema", f"name=test_schema"] ) diff --git a/tests_integration/test_snowpark_external_access.py b/tests_integration/test_snowpark_external_access.py index a9ecfd3235..eff6d42ebb 100644 --- a/tests_integration/test_snowpark_external_access.py +++ b/tests_integration/test_snowpark_external_access.py @@ -24,7 +24,6 @@ def test_snowpark_external_access(project_directory, _test_steps, test_database): with project_directory("snowpark_external_access") as project_dir: - _test_steps.snowpark_build_should_zip_files() _test_steps.snowpark_deploy_should_finish_successfully_and_return( diff --git a/tests_integration/test_streamlit.py b/tests_integration/test_streamlit.py index f25cc83ca4..a2473a76c9 100644 --- a/tests_integration/test_streamlit.py +++ b/tests_integration/test_streamlit.py @@ -73,12 +73,18 @@ def test_streamlit_deploy( result.json, {"status": "Statement executed successfully."}, ) - expect = snowflake_session.execute_string( - f"use role {_new_streamlit_role}; show streamlits like '{streamlit_name}'; use role integration_tests;" - ) - assert contains_row_with( - rows_from_snowflake_session(expect)[1], {"name": streamlit_name.upper()} - ) + + result = snowflake_session.execute_string("select current_role()") + current_role = row_from_snowflake_session(result)[0]["CURRENT_ROLE()"] + try: + expect = snowflake_session.execute_string( + f"use role {_new_streamlit_role}; show streamlits like '{streamlit_name}'" + ) + assert contains_row_with( + rows_from_snowflake_session(expect)[1], {"name": streamlit_name.upper()} + ) + finally: + 
snowflake_session.execute_string(f"use role {current_role}") result = runner.invoke_with_connection_json(["streamlit", "drop", streamlit_name]) assert contains_row_with( @@ -149,12 +155,17 @@ def test_streamlit_deploy_experimental_twice( result.json, {"status": "Statement executed successfully."}, ) - expect = snowflake_session.execute_string( - f"use role {_new_streamlit_role}; show streamlits like '{streamlit_name}'; use role integration_tests;" - ) - assert contains_row_with( - rows_from_snowflake_session(expect)[1], {"name": streamlit_name.upper()} - ) + result = snowflake_session.execute_string("select current_role()") + current_role = row_from_snowflake_session(result)[0]["CURRENT_ROLE()"] + try: + expect = snowflake_session.execute_string( + f"use role {_new_streamlit_role}; show streamlits like '{streamlit_name}'" + ) + assert contains_row_with( + rows_from_snowflake_session(expect)[1], {"name": streamlit_name.upper()} + ) + finally: + snowflake_session.execute_string(f"use role {current_role}") result = runner.invoke_with_connection_json( ["object", "drop", "streamlit", streamlit_name]