diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 10e430295e..bc5af947d9 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -7,7 +7,7 @@ src/snowflake/cli/api/metrics.py @snowflakedb/snowcli @snowflakedb/nade src/snowflake/cli/api/project/definition_conversion.py @snowflakedb/snowcli @snowflakedb/nade src/snowflake/cli/api/utils/ @snowflakedb/snowcli @snowflakedb/nade -tests/api/metrics/ @snowflakedb/snowcli @snowflakedb/nade +tests/api/test_metrics.py @snowflakedb/snowcli @snowflakedb/nade tests_common/__init__.py @snowflakedb/snowcli @snowflakedb/nade tests_common/conftest.py @snowflakedb/snowcli @snowflakedb/nade tests_common/deflake.py @snowflakedb/snowcli @snowflakedb/nade diff --git a/src/snowflake/cli/_app/telemetry.py b/src/snowflake/cli/_app/telemetry.py index 9c48eb49d4..fb7a8597cd 100644 --- a/src/snowflake/cli/_app/telemetry.py +++ b/src/snowflake/cli/_app/telemetry.py @@ -67,6 +67,10 @@ class CLITelemetryField(Enum): CONFIG_FEATURE_FLAGS = "config_feature_flags" # Metrics COUNTERS = "counters" + SPANS = "spans" + COMPLETED_SPANS = "completed_spans" + NUM_SPANS_PAST_DEPTH_LIMIT = "num_spans_past_depth_limit" + NUM_SPANS_PAST_TOTAL_LIMIT = "num_spans_past_total_limit" # Information EVENT = "event" ERROR_MSG = "error_msg" @@ -129,9 +133,12 @@ def _get_command_metrics() -> TelemetryDict: cli_context = get_cli_context() return { - CLITelemetryField.COUNTERS: { - **cli_context.metrics.counters, - } + CLITelemetryField.COUNTERS: cli_context.metrics.counters, + CLITelemetryField.SPANS: { + CLITelemetryField.COMPLETED_SPANS.value: cli_context.metrics.completed_spans, + CLITelemetryField.NUM_SPANS_PAST_DEPTH_LIMIT.value: cli_context.metrics.num_spans_past_depth_limit, + CLITelemetryField.NUM_SPANS_PAST_TOTAL_LIMIT.value: cli_context.metrics.num_spans_past_total_limit, + }, } diff --git a/src/snowflake/cli/_plugins/nativeapp/artifacts.py b/src/snowflake/cli/_plugins/nativeapp/artifacts.py index 6ed38300ba..fb212107c8 100644 --- a/src/snowflake/cli/_plugins/nativeapp/artifacts.py +++ b/src/snowflake/cli/_plugins/nativeapp/artifacts.py @@ -22,6 +22,7 @@ from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union from click.exceptions import ClickException +from snowflake.cli.api.cli_global_context import span from snowflake.cli.api.constants import DEFAULT_SIZE_LIMIT_MB from snowflake.cli.api.project.schemas.v1.native_app.path_mapping import PathMapping from snowflake.cli.api.project.util import to_identifier @@ -649,6 +650,7 @@ def resolve_without_follow(path: Path) -> Path: return Path(os.path.abspath(path)) +@span("bundle") def build_bundle( project_root: Path, deploy_root: Path, diff --git a/src/snowflake/cli/_plugins/nativeapp/codegen/compiler.py b/src/snowflake/cli/_plugins/nativeapp/codegen/compiler.py index 05456503fe..07302e6356 100644 --- a/src/snowflake/cli/_plugins/nativeapp/codegen/compiler.py +++ b/src/snowflake/cli/_plugins/nativeapp/codegen/compiler.py @@ -88,7 +88,10 @@ def compile_artifacts(self): if not self._should_invoke_processors(): return - with cc.phase("Invoking artifact processors"): + with ( + cc.phase("Invoking artifact processors"), + get_cli_context().metrics.span("artifact_processors"), + ): if self._bundle_ctx.generated_root.exists(): raise ClickException( f"Path {self._bundle_ctx.generated_root} already exists. Please choose a different name for your generated directory in the project definition file." 
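As a reviewer aid, a minimal sketch of the metrics section of a CMD_EXECUTION_RESULT telemetry message after this change. The "counters"/"spans" field names and the spans sub-keys come from CLITelemetryField in this diff, and the per-span "name"/"parent" keys match the integration-test expectations below; the remaining per-span keys and all values are illustrative assumptions about CLIMetricsSpan.to_dict().

    # Illustrative payload shape only; counter names, timings, and the
    # "execution_time"/"trimmed" key strings are assumptions.
    example_metrics_payload = {
        "counters": {"templates_processor": 1, "post_deploy_scripts": 1},
        "spans": {
            "completed_spans": [
                {"name": "action.app_pkg.deploy", "parent": None, "execution_time": 2.31, "trimmed": False},
                {"name": "bundle", "parent": "action.app_pkg.deploy", "execution_time": 0.42, "trimmed": False},
            ],
            "num_spans_past_depth_limit": 0,
            "num_spans_past_total_limit": 0,
        },
    }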
diff --git a/src/snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py b/src/snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py index 9665aa9bd6..0241899388 100644 --- a/src/snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py +++ b/src/snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py @@ -48,7 +48,7 @@ NativeAppExtensionFunction, ) from snowflake.cli._plugins.stage.diff import to_stage_path -from snowflake.cli.api.cli_global_context import get_cli_context +from snowflake.cli.api.cli_global_context import get_cli_context, span from snowflake.cli.api.console import cli_console as cc from snowflake.cli.api.metrics import CLICounterField from snowflake.cli.api.project.schemas.v1.native_app.path_mapping import ( @@ -167,6 +167,7 @@ class SnowparkAnnotationProcessor(ArtifactProcessor): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) + @span("snowpark_processor") def process( self, artifact_to_process: PathMapping, diff --git a/src/snowflake/cli/_plugins/nativeapp/codegen/templates/templates_processor.py b/src/snowflake/cli/_plugins/nativeapp/codegen/templates/templates_processor.py index 06ad67f164..9e38eecc2c 100644 --- a/src/snowflake/cli/_plugins/nativeapp/codegen/templates/templates_processor.py +++ b/src/snowflake/cli/_plugins/nativeapp/codegen/templates/templates_processor.py @@ -23,7 +23,7 @@ ArtifactProcessor, ) from snowflake.cli._plugins.nativeapp.exceptions import InvalidTemplateInFileError -from snowflake.cli.api.cli_global_context import get_cli_context +from snowflake.cli.api.cli_global_context import get_cli_context, span from snowflake.cli.api.console import cli_console as cc from snowflake.cli.api.metrics import CLICounterField from snowflake.cli.api.project.schemas.v1.native_app.path_mapping import ( @@ -91,6 +91,7 @@ def expand_templates_in_file( if expanded_template != file.contents: file.edited_contents = expanded_template + @span("templates_processor") def process( self, artifact_to_process: PathMapping, diff --git a/src/snowflake/cli/_plugins/nativeapp/entities/application.py b/src/snowflake/cli/_plugins/nativeapp/entities/application.py index a93176fba6..040e60e82f 100644 --- a/src/snowflake/cli/_plugins/nativeapp/entities/application.py +++ b/src/snowflake/cli/_plugins/nativeapp/entities/application.py @@ -55,9 +55,13 @@ from snowflake.cli._plugins.nativeapp.sf_facade import get_snowflake_facade from snowflake.cli._plugins.nativeapp.utils import needs_confirmation from snowflake.cli._plugins.workspace.context import ActionContext -from snowflake.cli.api.cli_global_context import get_cli_context +from snowflake.cli.api.cli_global_context import get_cli_context, span from snowflake.cli.api.console.abc import AbstractConsole -from snowflake.cli.api.entities.common import EntityBase, get_sql_executor +from snowflake.cli.api.entities.common import ( + EntityBase, + attach_spans_to_entity_actions, + get_sql_executor, +) from snowflake.cli.api.entities.utils import ( drop_generic_object, execute_post_deploy_hooks, @@ -321,6 +325,7 @@ def append_test_resource_suffix_to_identifier( return with_suffix +@attach_spans_to_entity_actions(entity_name="app") class ApplicationEntity(EntityBase[ApplicationEntityModel]): """ A Native App application object, created from an application package. 
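For context on the decorators used above: the new @span decorator (defined in cli_global_context.py later in this diff) only looks up the CLI context when the wrapped function is actually called, so forked contexts record their own spans. A rough, hand-written equivalent, using a hypothetical stand-in function:

    from snowflake.cli.api.cli_global_context import get_cli_context, span

    @span("snowpark_processor")
    def process_artifact() -> None:  # hypothetical stand-in for ArtifactProcessor.process
        ...

    # behaves roughly like opening the span manually at call time:
    def process_artifact_by_hand() -> None:
        with get_cli_context().metrics.span("snowpark_processor"):
            ...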
@@ -629,6 +634,7 @@ def get_objects_owned_by_application(self) -> List[ApplicationOwnedObject]: ).fetchall() return [{"name": row[1], "type": row[2]} for row in results] + @span("update_app_object") def create_or_upgrade_app( self, package: ApplicationPackageEntity, @@ -1043,6 +1049,7 @@ def stream_events( except KeyboardInterrupt: return + @span("get_snowsight_url_for_app") def get_snowsight_url(self) -> str: """Returns the URL that can be used to visit this app via Snowsight.""" name = identifier_for_url(self.name) diff --git a/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py b/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py index 68bd843f7e..83f32d8b52 100644 --- a/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py +++ b/src/snowflake/cli/_plugins/nativeapp/entities/application_package.py @@ -49,7 +49,12 @@ from snowflake.cli._plugins.stage.diff import DiffResult from snowflake.cli._plugins.stage.manager import StageManager from snowflake.cli._plugins.workspace.context import ActionContext -from snowflake.cli.api.entities.common import EntityBase, get_sql_executor +from snowflake.cli.api.cli_global_context import span +from snowflake.cli.api.entities.common import ( + EntityBase, + attach_spans_to_entity_actions, + get_sql_executor, +) from snowflake.cli.api.entities.utils import ( drop_generic_object, execute_post_deploy_hooks, @@ -157,6 +162,7 @@ def validate_source_stage(cls, input_value: str): return input_value +@attach_spans_to_entity_actions(entity_name="app_pkg") class ApplicationPackageEntity(EntityBase[ApplicationPackageEntityModel]): """ A Native App application package. @@ -900,6 +906,7 @@ def validate_setup_script( if validation_result["status"] == "FAIL": raise SetupScriptFailedValidation() + @span("validate_setup_script") def get_validation_result( self, use_scratch_stage: bool, interactive: bool, force: bool ): diff --git a/src/snowflake/cli/api/cli_global_context.py b/src/snowflake/cli/api/cli_global_context.py index a703e60299..abd4ce09a9 100644 --- a/src/snowflake/cli/api/cli_global_context.py +++ b/src/snowflake/cli/api/cli_global_context.py @@ -17,6 +17,7 @@ from contextlib import contextmanager from contextvars import ContextVar from dataclasses import dataclass, field, replace +from functools import wraps from pathlib import Path from typing import TYPE_CHECKING, Iterator @@ -157,7 +158,7 @@ def enable_tracebacks(self) -> bool: return self._manager.enable_tracebacks @property - def metrics(self): + def metrics(self) -> CLIMetrics: return self._manager.metrics @property @@ -213,6 +214,26 @@ def get_cli_context() -> _CliGlobalContextAccess: return _CliGlobalContextAccess(get_cli_context_manager()) +def span(span_name: str): + """ + Decorator to start a command metrics span that encompasses a whole function + + Must be used instead of directly calling @get_cli_context().metrics.span(span_name) + as a decorator to ensure that the cli context is grabbed at run time instead of at + module load time, which would not reflect forking + """ + + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + with get_cli_context().metrics.span(span_name): + return func(*args, **kwargs) + + return wrapper + + return decorator + + @contextmanager def fork_cli_context( connection_overrides: dict | None = None, diff --git a/src/snowflake/cli/api/entities/common.py b/src/snowflake/cli/api/entities/common.py index 9f98f970ec..021830fcb2 100644 --- a/src/snowflake/cli/api/entities/common.py +++ 
b/src/snowflake/cli/api/entities/common.py @@ -2,6 +2,7 @@ from typing import Generic, Type, TypeVar, get_args from snowflake.cli._plugins.workspace.context import ActionContext, WorkspaceContext +from snowflake.cli.api.cli_global_context import span from snowflake.cli.api.sql_execution import SqlExecutor @@ -20,6 +21,31 @@ class EntityActions(str, Enum): T = TypeVar("T") +def attach_spans_to_entity_actions(entity_name: str): + """ + Class decorator for EntityBase subclasses to automatically wrap + every implemented entity action method with a metrics span + + Args: + entity_name (str): Custom name for entity type to be displayed in metrics + """ + + def decorator(cls: type[EntityBase]) -> type[EntityBase]: + for attr_name, attr_value in vars(cls).items(): + is_entity_action = attr_name in [ + enum_member for enum_member in EntityActions + ] + + if is_entity_action and callable(attr_value): + attr_name_without_action_prefix = attr_name.partition("_")[2] + span_name = f"action.{entity_name}.{attr_name_without_action_prefix}" + action_with_span = span(span_name)(attr_value) + setattr(cls, attr_name, action_with_span) + return cls + + return decorator + + class EntityBase(Generic[T]): """ Base class for the fully-featured entity classes. diff --git a/src/snowflake/cli/api/entities/utils.py b/src/snowflake/cli/api/entities/utils.py index 2a49187851..4c5b8b0c78 100644 --- a/src/snowflake/cli/api/entities/utils.py +++ b/src/snowflake/cli/api/entities/utils.py @@ -23,7 +23,7 @@ to_stage_path, ) from snowflake.cli._plugins.stage.utils import print_diff_to_console -from snowflake.cli.api.cli_global_context import get_cli_context +from snowflake.cli.api.cli_global_context import get_cli_context, span from snowflake.cli.api.console.abc import AbstractConsole from snowflake.cli.api.entities.common import get_sql_executor from snowflake.cli.api.errno import ( @@ -76,6 +76,7 @@ def _get_stage_paths_to_sync( return stage_paths +@span("sync_deploy_root_with_stage") def sync_deploy_root_with_stage( console: AbstractConsole, deploy_root: Path, @@ -212,7 +213,10 @@ def execute_post_deploy_hooks( get_cli_context().metrics.set_counter(CLICounterField.POST_DEPLOY_SCRIPTS, 1) - with console.phase(f"Executing {deployed_object_type} post-deploy actions"): + with ( + console.phase(f"Executing {deployed_object_type} post-deploy actions"), + get_cli_context().metrics.span("post_deploy_hooks"), + ): sql_scripts_paths = [] display_paths = [] for hook in post_deploy_hooks: diff --git a/src/snowflake/cli/api/metrics.py b/src/snowflake/cli/api/metrics.py index a94be478df..69778872e1 100644 --- a/src/snowflake/cli/api/metrics.py +++ b/src/snowflake/cli/api/metrics.py @@ -116,10 +116,8 @@ class for holding metrics span data and encapsulating related operations children: Set[CLIMetricsSpan] = field(init=False, default_factory=set) # private state - # start time of the step from the monotonic clock in order to calculate execution time - _monotonic_start: float = field( - init=False, default_factory=lambda: time.monotonic() - ) + # start time of the step from a performance counter in order to calculate execution time + _start_time: float = field(init=False, default_factory=time.perf_counter) def __hash__(self) -> int: return hash(self.span_id) @@ -154,7 +152,7 @@ def finish(self, error: Optional[BaseException] = None) -> None: if error: self.error = error - self.execution_time = time.monotonic() - self._monotonic_start + self.execution_time = time.perf_counter() - self._start_time def to_dict(self) -> Dict: """ @@ -191,9 
+189,10 @@ class CLIMetrics: _in_progress_spans: List[CLIMetricsSpan] = field(init=False, default_factory=list) # list of finished steps for telemetry to process _completed_spans: List[CLIMetricsSpan] = field(init=False, default_factory=list) - # monotonic clock time of when this class was initialized to approximate when the command first started executing - _monotonic_start: float = field( - init=False, default_factory=lambda: time.monotonic(), compare=False + # clock time of a performance counter when this class was initialized + # to approximate when the command first started executing + _start_time: float = field( + init=False, default_factory=time.perf_counter, compare=False ) def clone(self) -> CLIMetrics: @@ -223,7 +222,7 @@ def current_span(self) -> Optional[CLIMetricsSpan]: return self._in_progress_spans[-1] if len(self._in_progress_spans) > 0 else None @contextmanager - def start_span(self, name: str) -> Iterator[CLIMetricsSpan]: + def span(self, name: str) -> Iterator[CLIMetricsSpan]: """ Starts a new span that tracks various metrics throughout its execution @@ -236,7 +235,7 @@ def start_span(self, name: str) -> Iterator[CLIMetricsSpan]: """ new_span = CLIMetricsSpan( name=name, - start_time=time.monotonic() - self._monotonic_start, + start_time=time.perf_counter() - self._start_time, parent=self.current_span, ) @@ -275,34 +274,29 @@ def num_spans_past_total_limit(self) -> int: @property def completed_spans(self) -> List[Dict]: """ - Returns the completed spans tracked throughout a command, sorted by start time, for reporting telemetry + Returns the completed spans tracked throughout a command for reporting telemetry Ensures that the spans we send are within the configured limits and marks certain spans as trimmed if their children would bypass the limits we set """ # take spans breadth-first within the depth and total limits # since we care more about the big picture than granularity - spans_to_report = set( - nsmallest( - n=self.SPAN_TOTAL_LIMIT, - iterable=( - span - for span in self._completed_spans - if span.span_depth <= self.SPAN_DEPTH_LIMIT - ), - key=lambda span: (span.span_depth, span.start_time), - ) + spans_to_report = nsmallest( + n=self.SPAN_TOTAL_LIMIT, + iterable=( + span + for span in self._completed_spans + if span.span_depth <= self.SPAN_DEPTH_LIMIT + ), + key=lambda span: (span.span_depth, span.start_time, span.execution_time), ) - # sort by start time to make reading the payload easier - sorted_spans_to_report = sorted( - spans_to_report, key=lambda span: span.start_time - ) + spans_to_report_set = set(spans_to_report) return [ { **span.to_dict(), - CLIMetricsSpan.TRIMMED_KEY: not span.children <= spans_to_report, + CLIMetricsSpan.TRIMMED_KEY: not span.children <= spans_to_report_set, } - for span in sorted_spans_to_report + for span in spans_to_report ] diff --git a/tests/api/metrics/__init__.py b/tests/api/metrics/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/api/metrics/test_metrics_counters.py b/tests/api/metrics/test_metrics_counters.py deleted file mode 100644 index 133269dda5..0000000000 --- a/tests/api/metrics/test_metrics_counters.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from snowflake.cli.api.metrics import CLIMetrics - - -def test_metrics_no_counters(): - # given - metrics = CLIMetrics() - - # when - - # then - assert metrics.counters == {} - assert metrics.get_counter("counter1") is None - - -def test_metrics_set_one_counter(): - # given - metrics = CLIMetrics() - - # when - metrics.set_counter("counter1", 1) - - # then - assert metrics.counters == {"counter1": 1} - assert metrics.get_counter("counter1") == 1 - - -def test_metrics_increment_new_counter(): - # given - metrics = CLIMetrics() - - # when - metrics.increment_counter("counter1") - - # then - assert metrics.counters == {"counter1": 1} - assert metrics.get_counter("counter1") == 1 - - -def test_metrics_increment_existing_counter(): - # given - metrics = CLIMetrics() - - # when - metrics.set_counter("counter1", 2) - metrics.increment_counter(name="counter1", value=2) - - # then - assert metrics.counters == {"counter1": 4} - assert metrics.get_counter("counter1") == 4 - - -def test_metrics_set_multiple_counters(): - # given - metrics = CLIMetrics() - - # when - metrics.set_counter("counter1", 1) - metrics.set_counter("counter2", 0) - metrics.set_counter(name="counter2", value=2) - - # then - assert metrics.counters == {"counter1": 1, "counter2": 2} - assert metrics.get_counter("counter1") == 1 - assert metrics.get_counter("counter2") == 2 - - -def test_metrics_set_default_new_counter(): - # given - metrics = CLIMetrics() - - # when - metrics.set_counter_default("c1", 3) - - # then - assert metrics.counters == {"c1": 3} - - -def test_metrics_set_default_existing_counter(): - # given - metrics = CLIMetrics() - - # when - metrics.set_counter("c2", 2) - metrics.set_counter_default("c2", 1) - - # then - assert metrics.counters == {"c2": 2} diff --git a/tests/api/test_global_cli_context.py b/tests/api/test_global_cli_context.py index 975ed3026d..a62bdfd5c5 100644 --- a/tests/api/test_global_cli_context.py +++ b/tests/api/test_global_cli_context.py @@ -90,10 +90,10 @@ def test_forked_context(): def test_forked_metrics_spans(): outer_metrics = get_cli_context_manager().metrics - with outer_metrics.start_span("outer_span"): + with outer_metrics.span("outer_span"): with fork_cli_context() as inner: inner_metrics = inner.metrics - with inner_metrics.start_span("inner_span"): + with inner_metrics.span("inner_span"): pass assert outer_metrics != inner_metrics diff --git a/tests/api/metrics/test_metrics_spans.py b/tests/api/test_metrics.py similarity index 79% rename from tests/api/metrics/test_metrics_spans.py rename to tests/api/test_metrics.py index 1a6934935e..aef4399097 100644 --- a/tests/api/metrics/test_metrics_spans.py +++ b/tests/api/test_metrics.py @@ -11,9 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
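A small, self-contained sketch of the breadth-first trimming that CLIMetrics.completed_spans now performs (metrics.py above): spans past the depth limit are dropped, nsmallest keeps the shallowest and earliest spans up to the total limit (the real key also breaks ties on execution time), and a reported span is flagged as trimmed when any of its children were cut. The span names, limits, and timings below are made up for illustration.

    from heapq import nsmallest

    SPAN_DEPTH_LIMIT, SPAN_TOTAL_LIMIT = 2, 3  # illustrative limits

    # toy spans: (name, depth, start_time, parent)
    spans = [
        ("action.app_pkg.deploy", 1, 0.00, None),
        ("bundle", 2, 0.01, "action.app_pkg.deploy"),
        ("artifact_processors", 2, 0.05, "action.app_pkg.deploy"),
        ("templates_processor", 3, 0.06, "artifact_processors"),  # past depth limit
        ("post_deploy_hooks", 2, 0.09, "action.app_pkg.deploy"),  # past total limit
    ]

    within_depth = [s for s in spans if s[1] <= SPAN_DEPTH_LIMIT]
    reported = nsmallest(SPAN_TOTAL_LIMIT, within_depth, key=lambda s: (s[1], s[2]))
    reported_names = {s[0] for s in reported}

    for name, depth, _, parent in reported:
        children = {s[0] for s in spans if s[3] == name}
        print(name, "trimmed" if not children <= reported_names else "complete")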
+ import uuid from itertools import count -from unittest import mock import pytest from snowflake.cli.api.metrics import ( @@ -23,12 +23,90 @@ ) -# we need to mock time.monotonic because on windows it does not -# capture enough precision for these tests to not be flaky -@pytest.fixture -def mock_time_monotonic(): - with mock.patch("time.monotonic", side_effect=count()) as mocked_time_monotonic: - yield mocked_time_monotonic +def test_metrics_no_counters(): + # given + metrics = CLIMetrics() + + # when + + # then + assert metrics.counters == {} + assert metrics.get_counter("counter1") is None + + +def test_metrics_set_one_counter(): + # given + metrics = CLIMetrics() + + # when + metrics.set_counter("counter1", 1) + + # then + assert metrics.counters == {"counter1": 1} + assert metrics.get_counter("counter1") == 1 + + +def test_metrics_increment_new_counter(): + # given + metrics = CLIMetrics() + + # when + metrics.increment_counter("counter1") + + # then + assert metrics.counters == {"counter1": 1} + assert metrics.get_counter("counter1") == 1 + + +def test_metrics_increment_existing_counter(): + # given + metrics = CLIMetrics() + + # when + metrics.set_counter("counter1", 2) + metrics.increment_counter(name="counter1", value=2) + + # then + assert metrics.counters == {"counter1": 4} + assert metrics.get_counter("counter1") == 4 + + +def test_metrics_set_multiple_counters(): + # given + metrics = CLIMetrics() + + # when + metrics.set_counter("counter1", 1) + metrics.set_counter("counter2", 0) + metrics.set_counter(name="counter2", value=2) + + # then + assert metrics.counters == {"counter1": 1, "counter2": 2} + assert metrics.get_counter("counter1") == 1 + assert metrics.get_counter("counter2") == 2 + + +def test_metrics_set_default_new_counter(): + # given + metrics = CLIMetrics() + + # when + metrics.set_counter_default("c1", 3) + + # then + assert metrics.counters == {"c1": 3} + + +def test_metrics_set_default_existing_counter(): + # given + metrics = CLIMetrics() + + # when + metrics.set_counter("c2", 2) + metrics.set_counter_default("c2", 1) + + # then + assert metrics.counters == {"c2": 2} # helper for testing span limits @@ -39,7 +117,7 @@ def create_span(num_spans: int): if num_spans <= 0: return - with metrics.start_span(f"span-{next(counter)}"): + with metrics.span(f"span-{next(counter)}"): create_span(num_spans - 1) for _ in range(width): @@ -59,12 +137,12 @@ def test_metrics_spans_initialization_empty(): assert metrics.num_spans_past_total_limit == 0 -def test_metrics_spans_single_span_no_error_or_parent(mock_time_monotonic): +def test_metrics_spans_single_span_no_error_or_parent(): # given metrics = CLIMetrics() # when - with metrics.start_span("span1") as span1: + with metrics.span("span1") as span1: assert metrics.current_span is span1 assert metrics.current_span is None @@ -92,7 +170,7 @@ def test_metrics_spans_finish_early_is_idempotent(): metrics = CLIMetrics() # when - with metrics.start_span("span1") as span1: + with metrics.span("span1") as span1: start_time = span1.start_time span1.finish() execution_time = span1.execution_time @@ -104,15 +182,15 @@ def test_metrics_spans_finish_early_is_idempotent(): assert span1_dict[CLIMetricsSpan.EXECUTION_TIME_KEY] == execution_time -def test_metrics_spans_parent_with_one_child(mock_time_monotonic): +def test_metrics_spans_parent_with_one_child(): # given metrics = CLIMetrics() # when - with metrics.start_span("parent") as parent: + with metrics.span("parent") as parent: assert metrics.current_span is parent - with 
metrics.start_span("child") as child: + with metrics.span("child") as child: assert metrics.current_span is child assert metrics.current_span is parent @@ -157,20 +235,20 @@ def test_metrics_spans_parent_with_one_child(mock_time_monotonic): ) -def test_metrics_spans_parent_with_two_children_same_name(mock_time_monotonic): +def test_metrics_spans_parent_with_two_children_same_name(): # given metrics = CLIMetrics() # when - with metrics.start_span("parent") as parent: + with metrics.span("parent") as parent: assert metrics.current_span is parent - with metrics.start_span("child") as child1: + with metrics.span("child") as child1: assert metrics.current_span is child1 assert metrics.current_span is parent - with metrics.start_span("child") as child2: + with metrics.span("child") as child2: assert metrics.current_span is child2 assert metrics.current_span is parent @@ -249,7 +327,7 @@ def test_metrics_spans_error_is_propagated(): # when with pytest.raises(RuntimeError): - with metrics.start_span("step1"): + with metrics.span("step1"): raise RuntimeError() # then @@ -264,16 +342,14 @@ def test_metrics_spans_empty_name_raises_error(): # when with pytest.raises(CLIMetricsInvalidUsageError) as err: - with metrics.start_span(""): + with metrics.span(""): pass # then assert err.match("span name must not be empty") -def test_metrics_spans_passing_depth_limit_should_add_to_counter_and_not_emit( - mock_time_monotonic, -): +def test_metrics_spans_passing_depth_limit_should_add_to_counter_and_not_emit(): # given metrics = CLIMetrics() diff --git a/tests_integration/nativeapp/test_feature_metrics.py b/tests_integration/nativeapp/test_feature_metrics.py deleted file mode 100644 index 539ef47f31..0000000000 --- a/tests_integration/nativeapp/test_feature_metrics.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from shlex import split -from typing import Dict -from unittest import mock - -from snowflake.cli._app.telemetry import TelemetryEvent -from snowflake.cli.api.metrics import CLICounterField -from tests.project.fixtures import * -from tests_integration.test_utils import extract_first_telemetry_message_of_type - - -@pytest.mark.integration -@pytest.mark.parametrize( - "command,expected_counter", - [ - ( - [ - "sql", - "-q", - "select '<% ctx.env.test %>'", - "--env", - "test=value_from_cli", - ], - 1, - ), - (["sql", "-q", "select 'string'"], 0), - ], -) -@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") -def test_sql_templating_emits_counter( - mock_telemetry, - command: List[str], - expected_counter, - runner, -): - result = runner.invoke_with_connection_json(command) - - assert result.exit_code == 0 - - message = extract_first_telemetry_message_of_type( - mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value - ) - - assert message["counters"][CLICounterField.SQL_TEMPLATES] == expected_counter - - -@pytest.mark.integration -@pytest.mark.parametrize( - "command," "test_project," "expected_counters", - [ - # ensure that post deploy scripts are picked up for v1 - ( - "app deploy", - "napp_application_post_deploy_v1", - { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 0, - CLICounterField.PDF_TEMPLATES: 0, - CLICounterField.POST_DEPLOY_SCRIPTS: 1, - CLICounterField.PACKAGE_SCRIPTS: 0, - }, - ), - # post deploy scripts should not be available for bundling since there is no deploy - ( - "ws bundle --entity-id=pkg", - "napp_templates_processors_v2", - { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 1, - CLICounterField.PDF_TEMPLATES: 1, - }, - ), - # ensure that templates processor is picked up - ( - "app run", - "napp_templates_processors_v1", - { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 1, - CLICounterField.PDF_TEMPLATES: 0, - CLICounterField.POST_DEPLOY_SCRIPTS: 0, - CLICounterField.PACKAGE_SCRIPTS: 0, - CLICounterField.EVENT_SHARING: 0, - CLICounterField.EVENT_SHARING_ERROR: 0, - CLICounterField.EVENT_SHARING_WARNING: 0, - }, - ), - # package scripts are auto-converted to post deploy scripts in v1 - ( - "app deploy", - "integration_external", - { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 0, - CLICounterField.PDF_TEMPLATES: 1, - CLICounterField.POST_DEPLOY_SCRIPTS: 1, - CLICounterField.PACKAGE_SCRIPTS: 1, - }, - ), - # ensure post deploy scripts are picked up for v2 - ( - "app deploy", - "integration_external_v2", - { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 0, - CLICounterField.PDF_TEMPLATES: 1, - CLICounterField.POST_DEPLOY_SCRIPTS: 1, - }, - ), - ], -) -@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") -def test_nativeapp_feature_counter_has_expected_value( - mock_telemetry, - runner, - nativeapp_teardown, - nativeapp_project_directory, - command: str, - test_project: str, - expected_counters: Dict[str, int], -): - local_test_env = { - "APP_DIR": "app", - "schema_name": "test_schema", - "table_name": "test_table", - "value": "test_value", - } - - with nativeapp_project_directory(test_project): - runner.invoke_with_connection(split(command), env=local_test_env) - - message = extract_first_telemetry_message_of_type( - mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value - ) - - assert message["counters"] == expected_counters diff --git 
a/tests_integration/nativeapp/test_metrics.py b/tests_integration/nativeapp/test_metrics.py new file mode 100644 index 0000000000..a18d5183a2 --- /dev/null +++ b/tests_integration/nativeapp/test_metrics.py @@ -0,0 +1,552 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from shlex import split +from typing import Dict, Callable +from unittest import mock + +from snowflake.cli._app.telemetry import TelemetryEvent, CLITelemetryField +from snowflake.cli.api.metrics import CLICounterField, CLIMetricsSpan +from tests.nativeapp.factories import ( + ProjectV11Factory, + ProjectV2Factory, + ApplicationPackageEntityModelFactory, + ApplicationEntityModelFactory, +) +from tests.project.fixtures import * +from tests_common import temp_dir +from tests_integration.test_utils import extract_first_telemetry_message_of_type + + +def assert_spans_within_limits(spans: Dict) -> None: + assert spans[CLITelemetryField.NUM_SPANS_PAST_DEPTH_LIMIT.value] == 0 + assert spans[CLITelemetryField.NUM_SPANS_PAST_TOTAL_LIMIT.value] == 0 + assert all( + span[CLIMetricsSpan.TRIMMED_KEY] == False + for span in spans[CLITelemetryField.COMPLETED_SPANS.value] + ) + + +def extract_span_keys_to_check(spans: Dict) -> List[Dict]: + SPAN_KEYS_TO_CHECK = [CLIMetricsSpan.NAME_KEY, CLIMetricsSpan.PARENT_KEY] + + return [ + {key: span[key] for key in SPAN_KEYS_TO_CHECK} + for span in spans[CLITelemetryField.COMPLETED_SPANS.value] + ] + + +@pytest.mark.integration +@pytest.mark.parametrize( + "command,expected_counter", + [ + ( + [ + "sql", + "-q", + "select '<% ctx.env.test %>'", + "--env", + "test=value_from_cli", + ], + 1, + ), + (["sql", "-q", "select 'string'"], 0), + ], +) +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_sql_templating_emits_counter( + mock_telemetry, + command: List[str], + expected_counter, + runner, +): + result = runner.invoke_with_connection_json(command) + + assert result.exit_code == 0 + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + assert ( + message[CLITelemetryField.COUNTERS.value][CLICounterField.SQL_TEMPLATES] + == expected_counter + ) + + +@pytest.mark.integration +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_feature_counters_v1_post_deploy_set_and_package_scripts_available( + mock_telemetry, + runner, + nativeapp_teardown, + temp_dir, +): + ProjectV11Factory( + pdf__native_app__artifacts=["README.md", "setup.sql", "manifest.yml"], + pdf__native_app__package__post_deploy=[ + {"sql_script": "post_deploy1.sql"}, + ], + files={ + "README.md": "", + "setup.sql": "select 1", + "manifest.yml": "\n", + "post_deploy1.sql": "\n", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + runner.invoke_with_connection(["app", "deploy"]) + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + assert message[CLITelemetryField.COUNTERS.value] 
== { + CLICounterField.SNOWPARK_PROCESSOR: 0, + CLICounterField.TEMPLATES_PROCESSOR: 0, + CLICounterField.PDF_TEMPLATES: 0, + CLICounterField.POST_DEPLOY_SCRIPTS: 1, + CLICounterField.PACKAGE_SCRIPTS: 0, + } + + +@pytest.mark.integration +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_feature_counters_v2_post_deploy_not_available_in_bundle( + mock_telemetry, + runner, + nativeapp_teardown, + temp_dir, +): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg_<% ctx.env.USER %>", + artifacts=[ + "setup.sql", + "README.md", + "manifest.yml", + ], + ), + app=ApplicationEntityModelFactory( + identifier="myapp", + fromm__target="pkg", + meta__post_deploy=[ + {"sql_script": "app_post_deploy1.sql"}, + ], + ), + ), + files={ + "app_post_deploy1.sql": "\n", + "setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "README.md": "\n", + "manifest.yml": "\n", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + runner.invoke_with_connection(["ws", "bundle", "--entity-id=pkg"]) + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + assert message[CLITelemetryField.COUNTERS.value] == { + CLICounterField.SNOWPARK_PROCESSOR: 0, + CLICounterField.TEMPLATES_PROCESSOR: 0, + CLICounterField.PDF_TEMPLATES: 1, + } + + +@pytest.mark.integration +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_feature_counter_v2_templates_processor_set( + mock_telemetry, + runner, + nativeapp_teardown, + temp_dir, +): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg", + artifacts=[ + "setup.sql", + "README.md", + "manifest.yml", + {"src": "app/*", "dest": "./", "processors": ["templates"]}, + ], + ), + app=ApplicationEntityModelFactory( + identifier="myapp", + fromm__target="pkg", + ), + ), + files={ + "setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "README.md": "\n", + "manifest.yml": "\n", + "app/dummy_file.md": "\n", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + runner.invoke_with_connection(["app", "run"]) + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + assert message[CLITelemetryField.COUNTERS.value] == { + CLICounterField.SNOWPARK_PROCESSOR: 0, + CLICounterField.TEMPLATES_PROCESSOR: 1, + CLICounterField.PDF_TEMPLATES: 0, + CLICounterField.POST_DEPLOY_SCRIPTS: 0, + CLICounterField.EVENT_SHARING: 0, + CLICounterField.EVENT_SHARING_ERROR: 0, + CLICounterField.EVENT_SHARING_WARNING: 0, + } + + +@pytest.mark.integration +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_feature_counter_v1_package_scripts_converted_to_post_deploy_and_both_set( + mock_telemetry, + runner, + nativeapp_teardown, + temp_dir, +): + ProjectV11Factory( + pdf__native_app__package__scripts=["scripts/package_script1.sql"], + pdf__native_app__artifacts=["README.md", "setup.sql", "manifest.yml"], + files={ + "README.md": "", + "setup.sql": "select 1", + "manifest.yml": "\n", + "scripts/package_script1.sql": "\n", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + runner.invoke_with_connection(["app", "deploy"]) + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + assert message[CLITelemetryField.COUNTERS.value] == { + 
CLICounterField.SNOWPARK_PROCESSOR: 0, + CLICounterField.TEMPLATES_PROCESSOR: 0, + CLICounterField.PDF_TEMPLATES: 0, + CLICounterField.POST_DEPLOY_SCRIPTS: 1, + CLICounterField.PACKAGE_SCRIPTS: 1, + } + + +@pytest.mark.integration +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_feature_counter_v2_post_deploy_set_and_package_scripts_not_available( + mock_telemetry, + runner, + nativeapp_teardown, + temp_dir, +): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg", + meta__post_deploy=[ + {"sql_script": "scripts/pkg_post_deploy1.sql"}, + ], + ), + app=ApplicationEntityModelFactory( + identifier="myapp_<% ctx.env.USER %>", + fromm__target="pkg", + ), + ), + files={ + "setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "README.md": "\n", + "manifest.yml": "\n", + "scripts/pkg_post_deploy1.sql": "-- package post-deploy script\n", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + runner.invoke_with_connection(["app", "deploy"]) + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + assert message[CLITelemetryField.COUNTERS.value] == { + CLICounterField.SNOWPARK_PROCESSOR: 0, + CLICounterField.TEMPLATES_PROCESSOR: 0, + CLICounterField.PDF_TEMPLATES: 1, + CLICounterField.POST_DEPLOY_SCRIPTS: 1, + } + + +@pytest.mark.integration +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_spans_bundle( + mock_telemetry, + runner, + nativeapp_teardown, + temp_dir, +): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg", + artifacts=[ + "setup.sql", + "README.md", + "manifest.yml", + ], + ), + app=ApplicationEntityModelFactory( + identifier="myapp", + fromm__target="pkg", + ), + ), + files={ + "setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "README.md": "\n", + "manifest.yml": "\n", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + runner.invoke_with_connection(["ws", "bundle", "--entity-id=pkg"]) + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + spans = message[CLITelemetryField.SPANS.value] + + assert_spans_within_limits(spans) + + assert extract_span_keys_to_check(spans) == [ + {"name": "action.app_pkg.bundle", "parent": None}, + {"name": "bundle", "parent": "action.app_pkg.bundle"}, + ] + + +@pytest.mark.integration +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_spans_run_with_all_features( + mock_telemetry, + runner, + nativeapp_teardown, + temp_dir, +): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg", + artifacts=[ + "setup.sql", + "README.md", + "manifest.yml", + {"src": "app/*", "dest": "./", "processors": ["templates"]}, + ], + meta__post_deploy=[ + {"sql_script": "scripts/pkg_post_deploy1.sql"}, + ], + ), + app=ApplicationEntityModelFactory( + identifier="myapp", + fromm__target="pkg", + meta__post_deploy=[ + {"sql_script": "scripts/app_post_deploy1.sql"}, + ], + ), + ), + files={ + "setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "README.md": "\n", + "manifest.yml": "\n", + "app/dummy_file.md": "\n", + "scripts/app_post_deploy1.sql": "select '';", + "scripts/pkg_post_deploy1.sql": "select '';", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + 
runner.invoke_with_connection(["app", "run"]) + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + spans = message[CLITelemetryField.SPANS.value] + + assert_spans_within_limits(spans) + + assert extract_span_keys_to_check(spans) == [ + { + "name": "action.app.deploy", + "parent": None, + }, + { + "name": "get_snowsight_url_for_app", + "parent": None, + }, + { + "name": "action.app_pkg.deploy", + "parent": "action.app.deploy", + }, + { + "name": "update_app_object", + "parent": "action.app.deploy", + }, + { + "name": "bundle", + "parent": "action.app_pkg.deploy", + }, + { + "name": "artifact_processors", + "parent": "action.app_pkg.deploy", + }, + { + "name": "sync_deploy_root_with_stage", + "parent": "action.app_pkg.deploy", + }, + { + "name": "post_deploy_hooks", + "parent": "action.app_pkg.deploy", + }, + { + "name": "validate_setup_script", + "parent": "action.app_pkg.deploy", + }, + { + "name": "post_deploy_hooks", + "parent": "update_app_object", + }, + { + "name": "templates_processor", + "parent": "artifact_processors", + }, + ] + + +@pytest.mark.integration +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_spans_validate( + mock_telemetry, + runner, + nativeapp_teardown, + temp_dir, +): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg", + artifacts=[ + "setup.sql", + "README.md", + "manifest.yml", + ], + ), + app=ApplicationEntityModelFactory( + identifier="myapp", + fromm__target="pkg", + ), + ), + files={ + "setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "README.md": "\n", + "manifest.yml": "\n", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + runner.invoke_with_connection(["app", "validate"]) + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + spans = message[CLITelemetryField.SPANS.value] + + assert_spans_within_limits(spans) + + assert extract_span_keys_to_check(spans) == [ + { + "name": "action.app_pkg.validate", + "parent": None, + }, + { + "name": "validate_setup_script", + "parent": "action.app_pkg.validate", + }, + { + "name": "bundle", + "parent": "validate_setup_script", + }, + { + "name": "sync_deploy_root_with_stage", + "parent": "validate_setup_script", + }, + ] + + +@pytest.mark.integration +@mock.patch("snowflake.connector.telemetry.TelemetryClient.try_add_log_to_batch") +def test_spans_teardown( + mock_telemetry, + runner, + nativeapp_teardown, + temp_dir, +): + ProjectV2Factory( + pdf__entities=dict( + pkg=ApplicationPackageEntityModelFactory( + identifier="myapp_pkg", + artifacts=[ + "setup.sql", + "README.md", + "manifest.yml", + ], + ), + app=ApplicationEntityModelFactory( + identifier="myapp", + fromm__target="pkg", + ), + ), + files={ + "setup.sql": "CREATE OR ALTER VERSIONED SCHEMA core;", + "README.md": "\n", + "manifest.yml": "\n", + }, + ) + + with nativeapp_teardown(project_dir=Path(temp_dir)): + runner.invoke_with_connection(["app", "teardown"]) + + message = extract_first_telemetry_message_of_type( + mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value + ) + + spans = message[CLITelemetryField.SPANS.value] + + assert_spans_within_limits(spans) + + assert extract_span_keys_to_check(spans) == [ + {"name": "action.app.drop", "parent": None}, + {"name": "action.app_pkg.drop", "parent": None}, + ]
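To make the expected span names in the integration tests above easier to audit, here is a minimal sketch of the naming rule implemented by attach_spans_to_entity_actions in entities/common.py: the entity action method name (assumed to follow the action_<name> convention implied by the resulting span names) drops everything up to the first underscore and is combined with the entity name passed to the decorator.

    def entity_action_span_name(entity_name: str, action_method_name: str) -> str:
        # "action_deploy" -> "deploy"; the span becomes "action.<entity>.<action>"
        action = action_method_name.partition("_")[2]
        return f"action.{entity_name}.{action}"

    assert entity_action_span_name("app_pkg", "action_bundle") == "action.app_pkg.bundle"
    assert entity_action_span_name("app", "action_deploy") == "action.app.deploy"
    assert entity_action_span_name("app_pkg", "action_validate") == "action.app_pkg.validate"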