From 1d75eac7716d7e575a6bdf7c9c69df2f0597833f Mon Sep 17 00:00:00 2001
From: Nikita Malinin
Date: Thu, 24 Oct 2024 12:09:04 +0200
Subject: [PATCH] Backend-agnostic telemetry update (#3028)

### Changes

- Added support for a backend-agnostic telemetry category (`MODEL_BASED_CATEGORY`) that is resolved to the backend-specific category from the `model` argument at call time.

### Reason for changes

- Telemetry coverage extension.

### Related tickets

- 154833

### Tests

- Updated `tests/common/test_telemetry.py` with a new case.
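### Usage sketch (illustrative)

A minimal sketch of how the new category is meant to be used, mirroring the decorator added to `nncf/quantization/quantize_model.py` below; the `quantize` function here is a simplified placeholder rather than the real API signature:

```python
from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeApi
from nncf.telemetry.decorator import tracked_function
from nncf.telemetry.events import MODEL_BASED_CATEGORY


# The category is no longer hard-coded per backend (nncf_pt, nncf_ov, ...);
# it is resolved from the `model` argument when the decorated function is called.
@tracked_function(MODEL_BASED_CATEGORY, [CompressionStartedWithQuantizeApi(), "target_device", "preset"])
def quantize(model, calibration_dataset, preset=None, target_device=None):
    ...
```

With the decorator on the common API, the backend-specific `quantize_impl`/`compress_weights_impl` functions no longer need their own `@tracked_function` decorators, which is why those are removed in the diff below.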
---
 nncf/onnx/quantization/quantize_model.py     |  4 ---
 nncf/openvino/quantization/quantize_model.py | 23 ------------
 nncf/quantization/quantize_model.py          | 35 ++++++++++++++++++
 nncf/telemetry/decorator.py                  |  7 ++++
 nncf/telemetry/events.py                     | 28 ++++++++++++++-
 nncf/telemetry/wrapper.py                    |  2 +-
 nncf/torch/quantization/quantize_model.py    | 17 ---------
 tests/common/test_telemetry.py               | 37 ++++++++++++++++++++
 8 files changed, 107 insertions(+), 46 deletions(-)

diff --git a/nncf/onnx/quantization/quantize_model.py b/nncf/onnx/quantization/quantize_model.py
index 094b98e81af..7a4665d1a0c 100644
--- a/nncf/onnx/quantization/quantize_model.py
+++ b/nncf/onnx/quantization/quantize_model.py
@@ -32,15 +32,11 @@
 from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization
 from nncf.quantization.quantize_model import quantize_with_tune_hyperparams
 from nncf.quantization.quantize_model import warning_model_no_batchwise_support
-from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeApi
 from nncf.scopes import IgnoredScope
-from nncf.telemetry import tracked_function
-from nncf.telemetry.events import NNCF_ONNX_CATEGORY

 TTensor = TypeVar("TTensor")


-@tracked_function(NNCF_ONNX_CATEGORY, [CompressionStartedWithQuantizeApi(), "target_device", "preset"])
 def quantize_impl(
     model: onnx.ModelProto,
     calibration_dataset: Dataset,
diff --git a/nncf/openvino/quantization/quantize_model.py b/nncf/openvino/quantization/quantize_model.py
index 3c5e3296f9d..37666a0980b 100644
--- a/nncf/openvino/quantization/quantize_model.py
+++ b/nncf/openvino/quantization/quantize_model.py
@@ -49,18 +49,12 @@
 from nncf.quantization.quantize_model import is_model_no_batchwise_support
 from nncf.quantization.quantize_model import quantize_with_tune_hyperparams
 from nncf.quantization.quantize_model import warning_model_no_batchwise_support
-from nncf.quantization.telemetry_extractors import CompressionStartedWithCompressWeightsApi
-from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeApi
-from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeWithAccuracyControlApi
 from nncf.scopes import IgnoredScope
 from nncf.scopes import validate_ignored_scope
-from nncf.telemetry.decorator import tracked_function
-from nncf.telemetry.events import NNCF_OV_CATEGORY

 TTensor = TypeVar("TTensor")


-@tracked_function(NNCF_OV_CATEGORY, [CompressionStartedWithQuantizeApi(), "target_device", "preset"])
 def native_quantize_if_op_impl(
     model: ov.Model,
     calibration_dataset: Dataset,
@@ -143,7 +137,6 @@ def _extract_all_subgraphs(model: ov.Model, current_id: str) -> None:
     return quantized_model


-@tracked_function(NNCF_OV_CATEGORY, [CompressionStartedWithQuantizeApi(), "target_device", "preset"])
 def native_quantize_impl(
     model: ov.Model,
     calibration_dataset: Dataset,
@@ -191,10 +184,6 @@ def native_quantize_impl(
     return quantized_model


-@tracked_function(
-    NNCF_OV_CATEGORY,
-    [CompressionStartedWithQuantizeWithAccuracyControlApi(), "target_device", "preset", "max_drop", "drop_type"],
-)
 def quantize_with_accuracy_control_impl(
     model: ov.Model,
     calibration_dataset: Dataset,
@@ -369,18 +358,6 @@ def quantize_impl(
     )


-@tracked_function(
-    NNCF_OV_CATEGORY,
-    [
-        CompressionStartedWithCompressWeightsApi(),
-        "mode",
-        "awq",
-        "scale_estimation",
-        "gptq",
-        "lora_correction",
-        "backup_mode",
-    ],
-)
 def compress_weights_impl(
     model: ov.Model,
     dataset: Dataset,
diff --git a/nncf/quantization/quantize_model.py b/nncf/quantization/quantize_model.py
index 03bf2689841..f475d4e52ee 100644
--- a/nncf/quantization/quantize_model.py
+++ b/nncf/quantization/quantize_model.py
@@ -36,7 +36,12 @@
 from nncf.quantization.algorithms.hyperparameter_tuner.algorithm import HyperparameterTuner
 from nncf.quantization.algorithms.hyperparameter_tuner.param_grid import get_quantization_param_grids
 from nncf.quantization.algorithms.post_training.pipeline import create_ptq_pipeline
+from nncf.quantization.telemetry_extractors import CompressionStartedWithCompressWeightsApi
+from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeApi
+from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeWithAccuracyControlApi
 from nncf.scopes import IgnoredScope
+from nncf.telemetry.decorator import tracked_function
+from nncf.telemetry.events import MODEL_BASED_CATEGORY

 TTensor = TypeVar("TTensor")

@@ -111,6 +116,14 @@ def _update_advanced_quantization_parameters(


 @api(canonical_alias="nncf.quantize")
+@tracked_function(
+    MODEL_BASED_CATEGORY,
+    [
+        CompressionStartedWithQuantizeApi(),
+        "target_device",
+        "preset",
+    ],
+)
 def quantize(
     model: TModel,
     calibration_dataset: Dataset,
@@ -265,6 +278,16 @@ def wrapper(*args, **kwargs):


 @api(canonical_alias="nncf.quantize_with_accuracy_control")
+@tracked_function(
+    MODEL_BASED_CATEGORY,
+    [
+        CompressionStartedWithQuantizeWithAccuracyControlApi(),
+        "target_device",
+        "preset",
+        "max_drop",
+        "drop_type",
+    ],
+)
 def quantize_with_accuracy_control(
     model: TModel,
     calibration_dataset: Dataset,
@@ -380,6 +403,18 @@ def quantize_with_accuracy_control(


 @api(canonical_alias="nncf.compress_weights")
+@tracked_function(
+    MODEL_BASED_CATEGORY,
+    [
+        CompressionStartedWithCompressWeightsApi(),
+        "mode",
+        "awq",
+        "scale_estimation",
+        "gptq",
+        "lora_correction",
+        "backup_mode",
+    ],
+)
 def compress_weights(
     model: TModel,
     mode=CompressWeightsMode.INT8_ASYM,
diff --git a/nncf/telemetry/decorator.py b/nncf/telemetry/decorator.py
index 3ccee0f9235..23ee56acc86 100644
--- a/nncf/telemetry/decorator.py
+++ b/nncf/telemetry/decorator.py
@@ -12,7 +12,9 @@
 import inspect
 from typing import Callable, List, Union

+from nncf.telemetry.events import MODEL_BASED_CATEGORY
 from nncf.telemetry.events import get_current_category
+from nncf.telemetry.events import get_model_based_category
 from nncf.telemetry.events import telemetry_category
 from nncf.telemetry.extractors import CollectedEvent
 from nncf.telemetry.extractors import TelemetryExtractor
@@ -49,6 +51,11 @@ def __call__(self, fn: Callable) -> Callable:
         def wrapped(*args, **kwargs):
             bound_args = fn_signature.bind(*args, **kwargs)
             bound_args.apply_defaults()
+
+            if self._category == MODEL_BASED_CATEGORY:
+                model_argument = bound_args.arguments.get("model", None)
+                self._category = get_model_based_category(model_argument)
+
             events: List[CollectedEvent] = []
             for collector in self._collectors:
                 argname = collector.argname
diff --git a/nncf/telemetry/events.py b/nncf/telemetry/events.py
index b7f6ebdc95b..a048fbbb0c7 100644
--- a/nncf/telemetry/events.py
+++ b/nncf/telemetry/events.py
@@ -10,15 +10,25 @@
 # limitations under the License.

 from contextlib import contextmanager
-from typing import Optional
+from typing import Optional, TypeVar

+from nncf.common.utils.backend import BackendType
+from nncf.common.utils.backend import get_backend
+
+# Backend categories
 NNCF_TF_CATEGORY = "nncf_tf"
 NNCF_PT_CATEGORY = "nncf_pt"
+NNCF_PT_FX_CATEGORY = "nncf_pt_fx"
 NNCF_ONNX_CATEGORY = "nncf_onnx"
 NNCF_OV_CATEGORY = "nncf_ov"

+# Dynamic categories
+MODEL_BASED_CATEGORY = "model_based"
+
 CURRENT_CATEGORY = None

+TModel = TypeVar("TModel")
+

 def _set_current_category(category: str):
     global CURRENT_CATEGORY
@@ -29,6 +39,22 @@ def get_current_category() -> Optional[str]:
     return CURRENT_CATEGORY


+def get_model_based_category(model: TModel) -> str:
+    category_by_backend = {
+        BackendType.ONNX: NNCF_ONNX_CATEGORY,
+        BackendType.OPENVINO: NNCF_OV_CATEGORY,
+        BackendType.TORCH: NNCF_PT_CATEGORY,
+        BackendType.TENSORFLOW: NNCF_TF_CATEGORY,
+        BackendType.TORCH_FX: NNCF_PT_FX_CATEGORY,
+    }
+    category = None
+    if model is not None:
+        model_backend = get_backend(model)
+        category = category_by_backend[model_backend]
+
+    return category
+
+
 @contextmanager
 def telemetry_category(category: str) -> str:
     previous_category = get_current_category()
diff --git a/nncf/telemetry/wrapper.py b/nncf/telemetry/wrapper.py
index fecd9ad0971..2cc32af0158 100644
--- a/nncf/telemetry/wrapper.py
+++ b/nncf/telemetry/wrapper.py
@@ -16,10 +16,10 @@
 from typing import Callable, Optional
 from unittest.mock import MagicMock

-from nncf import __version__
 from nncf.common.logging import nncf_logger
 from nncf.definitions import NNCF_CI_ENV_VAR_NAME
 from nncf.definitions import NNCF_DEV_ENV_VAR_NAME
+from nncf.version import __version__ as __version__

 NNCFTelemetryStub = MagicMock

diff --git a/nncf/torch/quantization/quantize_model.py b/nncf/torch/quantization/quantize_model.py
index b3975b15b8e..23cb451f5fe 100644
--- a/nncf/torch/quantization/quantize_model.py
+++ b/nncf/torch/quantization/quantize_model.py
@@ -29,18 +29,13 @@
 from nncf.quantization.algorithms.post_training.algorithm import PostTrainingQuantization
 from nncf.quantization.algorithms.weight_compression.algorithm import WeightCompression
 from nncf.quantization.quantize_model import warning_model_no_batchwise_support
-from nncf.quantization.telemetry_extractors import CompressionStartedWithCompressWeightsApi
-from nncf.quantization.telemetry_extractors import CompressionStartedWithQuantizeApi
 from nncf.scopes import IgnoredScope
-from nncf.telemetry.decorator import tracked_function
-from nncf.telemetry.events import NNCF_PT_CATEGORY
 from nncf.torch.graph.operator_metatypes import OPERATIONS_OUTPUT_HAS_NO_BATCH_AXIS
 from nncf.torch.model_creation import wrap_model

 DEFAULT_RANGE_TYPE = "mean_min_max"


-@tracked_function(NNCF_PT_CATEGORY, [CompressionStartedWithQuantizeApi(), "target_device", "preset"])
 def quantize_impl(
     model: torch.nn.Module,
     calibration_dataset: Dataset,
@@ -86,18 +81,6 @@ def quantize_impl(
     return quantized_model


-@tracked_function(
-    NNCF_PT_CATEGORY,
-    [
-        CompressionStartedWithCompressWeightsApi(),
-        "mode",
-        "awq",
-        "scale_estimation",
-        "gptq",
-        "lora_correction",
-        "backup_mode",
-    ],
-)
 def compress_weights_impl(
     model: torch.nn.Module,
     dataset: Dataset,
diff --git a/tests/common/test_telemetry.py b/tests/common/test_telemetry.py
index 16ee9b503fd..b2962080509 100644
--- a/tests/common/test_telemetry.py
+++ b/tests/common/test_telemetry.py
@@ -18,10 +18,17 @@

 import pytest

+from nncf.common.utils.backend import BackendType
 from nncf.definitions import NNCF_CI_ENV_VAR_NAME
 from nncf.definitions import NNCF_DEV_ENV_VAR_NAME
 from nncf.telemetry import TelemetryExtractor
 from nncf.telemetry import tracked_function
+from nncf.telemetry.events import MODEL_BASED_CATEGORY
+from nncf.telemetry.events import NNCF_ONNX_CATEGORY
+from nncf.telemetry.events import NNCF_OV_CATEGORY
+from nncf.telemetry.events import NNCF_PT_CATEGORY
+from nncf.telemetry.events import NNCF_PT_FX_CATEGORY
+from nncf.telemetry.events import NNCF_TF_CATEGORY
 from nncf.telemetry.extractors import CollectedEvent
 from nncf.telemetry.wrapper import NNCFTelemetryStub
 from nncf.telemetry.wrapper import skip_if_raised
@@ -205,3 +212,33 @@ def test_skip_if_raised():
     # Incorrect args
     wrapped(1, 2, 3)
     assert raises.call_count == 1
+
+
+@pytest.mark.parametrize(
+    "backend_type, reference_category",
+    [
+        (BackendType.OPENVINO, NNCF_OV_CATEGORY),
+        (BackendType.TENSORFLOW, NNCF_TF_CATEGORY),
+        (BackendType.ONNX, NNCF_ONNX_CATEGORY),
+        (BackendType.TORCH, NNCF_PT_CATEGORY),
+        (BackendType.TORCH_FX, NNCF_PT_FX_CATEGORY),
+    ],
+)
+def test_model_based_category(backend_type, reference_category, spies, mocker):
+    send_event_spy, start_session_event_spy, end_session_event_spy = spies
+    mocker.patch("nncf.telemetry.events.get_backend", return_value=backend_type)
+
+    @tracked_function(category=MODEL_BASED_CATEGORY, extractors=["arg1"])
+    def model_based_fn_to_test(model, arg1):
+        pass
+
+    model_based_fn_to_test(MagicMock(), "arg1_value")
+
+    assert start_session_event_spy.call_count == 1
+    assert end_session_event_spy.call_count == 1
+    assert send_event_spy.call_count == 1
+
+    expected_call_args_list = [
+        call(event_category=reference_category, event_action="arg1", event_label="arg1_value", event_value=None),
+    ]
+    assert send_event_spy.call_args_list == expected_call_args_list
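For reference, a minimal standalone sketch (not part of the patch) of how the dynamic category resolution behaves from user code; `dummy_compression_fn` and its arguments are placeholders introduced here for illustration, assuming NNCF with this change installed:

```python
from unittest.mock import MagicMock, patch

from nncf.common.utils.backend import BackendType
from nncf.telemetry import tracked_function
from nncf.telemetry.events import MODEL_BASED_CATEGORY
from nncf.telemetry.events import NNCF_OV_CATEGORY
from nncf.telemetry.events import get_current_category


@tracked_function(MODEL_BASED_CATEGORY, ["preset"])
def dummy_compression_fn(model, preset):
    # By the time the body runs, MODEL_BASED_CATEGORY has been replaced with the
    # category derived from `model` via get_model_based_category().
    return get_current_category()


# Pretend the model is an OpenVINO model, as the new test does with mocker.patch.
with patch("nncf.telemetry.events.get_backend", return_value=BackendType.OPENVINO):
    assert dummy_compression_fn(MagicMock(), preset="performance") == NNCF_OV_CATEGORY  # "nncf_ov"
```

The parametrized `test_model_based_category` case added above exercises the same path for each supported backend.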