diff --git a/examples/torch/common/export.py b/examples/torch/common/export.py
index c723494b43b..8d3a4c266e8 100644
--- a/examples/torch/common/export.py
+++ b/examples/torch/common/export.py
@@ -26,9 +26,9 @@ def export_model(ctrl: CompressionAlgorithmController, save_path: str, no_strip_
 
     model = ctrl.model if no_strip_on_export else ctrl.strip()
     model = model.eval().cpu()
-    input_names = generate_input_names_list(len(model.input_infos))
+    input_names = generate_input_names_list(len(model.nncf.input_infos))
     input_tensor_list = []
-    for info in model.input_infos:
+    for info in model.nncf.input_infos:
         input_shape = tuple([1] + list(info.shape)[1:])
         input_tensor_list.append(torch.rand(input_shape))
 
diff --git a/nncf/experimental/torch/sparsity/movement/structured_mask_handler.py b/nncf/experimental/torch/sparsity/movement/structured_mask_handler.py
index a4506be15bc..c7626dac4b4 100644
--- a/nncf/experimental/torch/sparsity/movement/structured_mask_handler.py
+++ b/nncf/experimental/torch/sparsity/movement/structured_mask_handler.py
@@ -391,7 +391,7 @@ def _create_structured_mask_context_groups(
 
         module_vs_sparse_module_info_map = {minfo.module: minfo for minfo in sparsified_module_info_list}
         pruning_producing_types = ["linear"]
-        nncf_graph = nncf_network.get_original_graph()
+        nncf_graph = nncf_network.nncf.get_original_graph()
         pruning_groups = get_pruning_groups(
             nncf_graph, PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types
         )
diff --git a/nncf/torch/nncf_network.py b/nncf/torch/nncf_network.py
index 3c4a814326a..3ce64514848 100644
--- a/nncf/torch/nncf_network.py
+++ b/nncf/torch/nncf_network.py
@@ -23,7 +23,6 @@
 from torch import nn
 
 from nncf import nncf_logger
-from nncf.common.deprecation import warning_deprecated
 from nncf.common.graph import NNCFNode
 from nncf.common.graph import NNCFNodeName
 from nncf.common.graph.definitions import MODEL_INPUT_OP_NAME
@@ -967,26 +966,6 @@ def nncf(self) -> NNCFNetworkInterface:
         # self._nncf is being set in the creation function defined in the NNCFNetworkMeta metaclass
         return self._nncf
 
-    def __getattr__(self, key):
-        """
-        Only defined for purposes of deprecation warnings. This method should be removed after v2.5.0.
-        """
-        try:
-            return super().__getattr__(key)
-        except AttributeError as e:
-            if hasattr(self._nncf, key):
-                warning_deprecated(
-                    "Old style of accessing NNCF-specific attributes and methods on NNCFNetwork "
-                    "objects is deprecated. "
-                    "Access the NNCF-specific attrs through the NNCFInterface, which is "
-                    "set up as an `nncf` attribute on the compressed model object.\n"
-                    "For instance, instead of `compressed_model.get_graph()` "
-                    "you should now write `compressed_model.nncf.get_graph()`.\n"
-                    "The old style will be removed after NNCF v2.5.0"
-                )
-                return getattr(self._nncf, key)
-            raise e
-
     def __setattr__(self, key, value):
         # If setting `forward`, set it on the original model.
         if key == "forward":
@@ -1004,16 +983,6 @@ def __setattr__(self, key, value):
             )
         super().__setattr__(key, value)
 
-    def get_nncf_wrapped_model(self) -> "NNCFNetwork":
-        warning_deprecated(
-            "Calls to NNCFNetwork.get_nncf_wrapped_model() are deprecated and will be removed "
-            "in NNCF v2.6.0.\n"
-            "Starting from NNCF v2.5.0, the compressed model object already inherits the original "
-            "class of the uncompressed model and the forward signature, so the call to "
-            ".get_nncf_wrapped_model() may be simply omitted."
-        )
-        return self
-
 
 class NNCFSkippingIter:
     """
diff --git a/tests/torch/pruning/experimental/test_nodes_grouping.py b/tests/torch/pruning/experimental/test_nodes_grouping.py
index 6a98b2bc39e..75e00c88894 100644
--- a/tests/torch/pruning/experimental/test_nodes_grouping.py
+++ b/tests/torch/pruning/experimental/test_nodes_grouping.py
@@ -474,7 +474,7 @@ def test_groups(desc: GroupTestDesc, mocker, tmp_path):
     pruning_producing_types = ["linear"]
     get_graph_spy = mocker.spy(BlockHierarchy, "_get_graph_for_visualization")
     not_filtered_groups = get_pruning_groups(
-        nncf_network.get_graph(), PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types, tmp_path
+        nncf_network.nncf.get_graph(), PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types, tmp_path
     )
 
     nx_graph = get_graph_spy.spy_return
@@ -511,7 +511,7 @@ def test_all_groups_valid(desc: GroupTestDesc):
     nncf_network = create_nncf_network(model, config)
     pruning_producing_types = ["linear"]
     all_groups = get_pruning_groups(
-        nncf_network.get_graph(), PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types
+        nncf_network.nncf.get_graph(), PT_EXPERIMENTAL_PRUNING_OPERATOR_METATYPES, pruning_producing_types
     )
     for group in all_groups:
         assert group.consumers
diff --git a/tests/torch/test_nncf_network.py b/tests/torch/test_nncf_network.py
index 33ec823b07f..65585042f38 100644
--- a/tests/torch/test_nncf_network.py
+++ b/tests/torch/test_nncf_network.py
@@ -24,7 +24,6 @@
 from nncf.common.graph import NNCFNode
 from nncf.common.graph.operator_metatypes import UnknownMetatype
 from nncf.common.graph.transformations.commands import TargetType
-from nncf.common.logging.logger import NNCFDeprecationWarning
 from nncf.torch import register_module
 from nncf.torch.dynamic_graph.graph_tracer import ModelInputInfo
 from nncf.torch.dynamic_graph.operation_address import OperationAddress
@@ -584,13 +583,6 @@ def test_works_when_wrapped_with_dataparallel(simple_net):
     dp_model(torch.zeros([10, *simple_net.INPUT_SIZE[1:]], device="cuda"))
 
 
-def test_warns_on_old_style_calls(simple_net):
-    with pytest.warns(NNCFDeprecationWarning):
-        simple_net.get_graph()
-    with pytest.warns(NNCFDeprecationWarning):
-        simple_net.get_nncf_wrapped_model()
-
-
 def test_class_has_same_name_and_module_as_original(simple_net):
     assert simple_net.__class__.__name__ == SimplestModel.__name__
     assert simple_net.__class__.__module__ == SimplestModel.__module__
@@ -657,10 +649,10 @@ def test_reset_original_unbound_forward():
     inp = torch.ones((1,))
     assert nncf_network.forward(inp) == inp
 
-    nncf_network.set_original_unbound_forward(model.__class__.other_forward)
+    nncf_network.nncf.set_original_unbound_forward(model.__class__.other_forward)
     assert nncf_network.forward(inp) == inp * 2
 
-    nncf_network.reset_original_unbound_forward()
+    nncf_network.nncf.reset_original_unbound_forward()
    assert nncf_network.forward(inp) == inp
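
For context on the call-site changes above, here is a minimal sketch (not part of the diff) of the access pattern these hunks converge on: NNCF-specific attributes and methods are reached through the model's `nncf` attribute instead of directly on the compressed model object. The toy model and quantization config below are illustrative assumptions, not taken from this PR; the sketch assumes `torch` and `nncf` are installed.

```python
import torch
from torch import nn

from nncf import NNCFConfig
from nncf.torch import create_compressed_model

# Toy model and config, chosen only for illustration (assumption, not from this PR).
model = nn.Sequential(nn.Linear(4, 4))
config = NNCFConfig.from_dict(
    {
        "input_info": {"sample_size": [1, 4]},
        "compression": {"algorithm": "quantization"},
    }
)
ctrl, compressed_model = create_compressed_model(model, config)

# Old style (removed by this PR): compressed_model.get_graph(), compressed_model.input_infos
# New style: go through the NNCF interface exposed as the `nncf` attribute.
graph = compressed_model.nncf.get_graph()
input_infos = compressed_model.nncf.input_infos
print(type(graph).__name__, len(input_infos))
```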