From d88422a03622eea4ba333d7d13b642a8270f1871 Mon Sep 17 00:00:00 2001
From: Andrey Churkin
Date: Wed, 23 Oct 2024 07:09:31 +0100
Subject: [PATCH] Remove tensorflow_addons (#3025)

### Changes

Remove tensorflow_addons dependency.

### Reason for changes

tensorflow_addons was deprecated

### Related tickets

- Ref: 155320

### Tests

Current scope

---
 docs/api/source/conf.py                            |  1 -
 examples/tensorflow/classification/main.py         |  5 ++---
 examples/tensorflow/common/optimizer.py            | 13 ++-----------
 examples/tensorflow/requirements.txt               |  2 --
 nncf/tensor/README.md                              |  1 -
 nncf/tensorflow/accuracy_aware_training/runner.py  | 11 +++++------
 tests/tensorflow/requirements.txt                  |  2 --
 tests/tensorflow/sparsity/rb/test_integration.py   |  5 ++---
 8 files changed, 11 insertions(+), 29 deletions(-)

diff --git a/docs/api/source/conf.py b/docs/api/source/conf.py
index 4637c7875d4..fe3afe0525c 100644
--- a/docs/api/source/conf.py
+++ b/docs/api/source/conf.py
@@ -137,7 +137,6 @@ def collect_api_entities() -> APIInfo:
         "openvino",
         "tensorflow",
         "keras",
-        "tensorflow_addons",
         # Need add backend implementation functions to avoid endless loops on registered functions by mock module,
         "nncf.tensor.functions.numpy_numeric",
         "nncf.tensor.functions.numpy_linalg",
diff --git a/examples/tensorflow/classification/main.py b/examples/tensorflow/classification/main.py
index 1f977068b56..06a850eba44 100644
--- a/examples/tensorflow/classification/main.py
+++ b/examples/tensorflow/classification/main.py
@@ -14,7 +14,6 @@
 from pathlib import Path

 import tensorflow as tf
-import tensorflow_addons as tfa

 import nncf
 from examples.common.paths import configure_paths
@@ -199,8 +198,8 @@ def run(config):
     metrics = [
         tf.keras.metrics.CategoricalAccuracy(name="acc@1"),
         tf.keras.metrics.TopKCategoricalAccuracy(k=5, name="acc@5"),
-        tfa.metrics.MeanMetricWrapper(loss_obj, name="ce_loss"),
-        tfa.metrics.MeanMetricWrapper(compression_ctrl.loss, name="cr_loss"),
+        tf.keras.metrics.MeanMetricWrapper(loss_obj, name="ce_loss"),
+        tf.keras.metrics.MeanMetricWrapper(compression_ctrl.loss, name="cr_loss"),
     ]

     compress_model.compile(
diff --git a/examples/tensorflow/common/optimizer.py b/examples/tensorflow/common/optimizer.py
index 78fead279ec..a84afe13782 100644
--- a/examples/tensorflow/common/optimizer.py
+++ b/examples/tensorflow/common/optimizer.py
@@ -10,7 +10,6 @@
 # limitations under the License.

 import tensorflow as tf
-import tensorflow_addons as tfa

 from examples.tensorflow.common.logger import logger

@@ -33,7 +32,7 @@ def build_optimizer(config, scheduler):
         weight_decay = optimizer_config.get("weight_decay", None)
         common_params = {"learning_rate": scheduler, "nesterov": nesterov, "momentum": momentum}
         if weight_decay:
-            optimizer = tfa.optimizers.SGDW(**common_params, weight_decay=weight_decay)
+            optimizer = tf.keras.optimizers.SGD(**common_params, weight_decay=weight_decay)
         else:
             optimizer = tf.keras.optimizers.SGD(**common_params)
     elif optimizer_type == "rmsprop":
@@ -59,18 +58,10 @@ def build_optimizer(config, scheduler):
             "amsgrad": amsgrad,
         }
         if weight_decay:
-            optimizer = tfa.optimizers.AdamW(**common_params, weight_decay=weight_decay)
+            optimizer = tf.keras.optimizers.AdamW(**common_params, weight_decay=weight_decay)
         else:
             optimizer = tf.keras.optimizers.Adam(**common_params)
     else:
         raise ValueError("Unknown optimizer %s" % optimizer_type)

-    moving_average_decay = optimizer_params.get("moving_average_decay", 0.0)
-    if moving_average_decay > 0.0:
-        logger.info("Including moving average decay.")
-        optimizer = tfa.optimizers.MovingAverage(optimizer, average_decay=moving_average_decay, num_updates=None)
-    if optimizer_params.get("lookahead", None):
-        logger.info("Using lookahead optimizer.")
-        optimizer = tfa.optimizers.Lookahead(optimizer)
-
     return optimizer
diff --git a/examples/tensorflow/requirements.txt b/examples/tensorflow/requirements.txt
index 2a7da4ea429..2c758123698 100644
--- a/examples/tensorflow/requirements.txt
+++ b/examples/tensorflow/requirements.txt
@@ -4,8 +4,6 @@ absl-py==1.0.0
 tensorflow
 tensorflow_datasets==4.2.0
 tensorflow_hub
-tensorflow_addons==0.20.0; python_version < '3.9'
-tensorflow_addons==0.23.0; python_version >= '3.9'
 tensorflow-metadata==1.13.0
 opencv-python
 pycocotools==2.0.6
diff --git a/nncf/tensor/README.md b/nncf/tensor/README.md
index 2b34fc0a46f..ff912d8a9d4 100644
--- a/nncf/tensor/README.md
+++ b/nncf/tensor/README.md
@@ -191,7 +191,6 @@ tensor_a[0:2] # Tensor(array([[1],[2]]))
     "onnxruntime",
     "openvino",
     "tensorflow",
-    "tensorflow_addons",
     "nncf.tensor.functions.torch_*",
     "nncf.tensor.functions.numpy_*",
     "nncf.tensor.functions._*",
diff --git a/nncf/tensorflow/accuracy_aware_training/runner.py b/nncf/tensorflow/accuracy_aware_training/runner.py
index d21411835ab..42553286f1f 100644
--- a/nncf/tensorflow/accuracy_aware_training/runner.py
+++ b/nncf/tensorflow/accuracy_aware_training/runner.py
@@ -12,7 +12,6 @@
 import os.path as osp

 import tensorflow as tf
-import tensorflow_addons as tfa
 from tensorflow.keras.optimizers import schedules

 from nncf.common.accuracy_aware_training.runner import BaseAccuracyAwareTrainingRunner
@@ -35,14 +34,14 @@ def validate(self, model):

     def reset_training(self):
         self.configure_optimizers()
-        if isinstance(self.optimizer, tfa.optimizers.MultiOptimizer):
-            optimizers = [optimizer_spec.optimizer for optimizer_spec in self.optimizer.optimizer_specs]
-        else:
-            optimizers = self.optimizer if isinstance(self.optimizer, (tuple, list)) else [self.optimizer]
+        optimizers = self.optimizer if isinstance(self.optimizer, (tuple, list)) else [self.optimizer]
         for optimizer in optimizers:
             scheduler = optimizer.learning_rate
-            if isinstance(scheduler, tf.Variable):
+            # pylint: disable=protected-access
+            if isinstance(scheduler, tf.Variable) and not isinstance(
+                optimizer._learning_rate, schedules.LearningRateSchedule
+            ):
                 scheduler = scheduler * self.base_lr_reduction_factor_during_search
                 optimizer.learning_rate = scheduler
                 optimizer.lr = scheduler

diff --git a/tests/tensorflow/requirements.txt b/tests/tensorflow/requirements.txt
index d9924cc6765..b63a60eeed2 100644
--- a/tests/tensorflow/requirements.txt
+++ b/tests/tensorflow/requirements.txt
@@ -1,7 +1,5 @@
 -c ../../constraints.txt
 PyYAML
-tensorflow_addons==0.20.0; python_version < '3.9'
-tensorflow_addons==0.23.0; python_version >= '3.9'
 tensorflow-metadata==1.13.0
 pytest
 pytest-cov
diff --git a/tests/tensorflow/sparsity/rb/test_integration.py b/tests/tensorflow/sparsity/rb/test_integration.py
index 0935b3d2ef3..728634bbf76 100644
--- a/tests/tensorflow/sparsity/rb/test_integration.py
+++ b/tests/tensorflow/sparsity/rb/test_integration.py
@@ -14,7 +14,6 @@

 import pytest
 import tensorflow as tf
-import tensorflow_addons as tfa
 from tensorflow.python.framework.config import disable_op_determinism
 from tensorflow.python.framework.config import enable_op_determinism

@@ -191,8 +190,8 @@ def on_epoch_end(self, epoch, logs=None):
     metrics = [
         tf.keras.metrics.CategoricalAccuracy(name="acc@1"),
         tf.keras.metrics.TopKCategoricalAccuracy(k=5, name="acc@5"),
-        tfa.metrics.MeanMetricWrapper(loss_obj, name="ce_loss"),
-        tfa.metrics.MeanMetricWrapper(compress_algo.loss, name="cr_loss"),
+        tf.keras.metrics.MeanMetricWrapper(loss_obj, name="ce_loss"),
+        tf.keras.metrics.MeanMetricWrapper(compress_algo.loss, name="cr_loss"),
     ]

     compress_model.add_loss(compress_algo.loss)
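For context, a minimal sketch of the substitution pattern this patch applies, assuming a TensorFlow release (>= 2.11) in which `tf.keras.optimizers.AdamW` and the `weight_decay` argument on built-in Keras optimizers are available; the loss object and hyperparameter values below are illustrative placeholders, not values taken from the patch:

```python
import tensorflow as tf

# Illustrative loss; the examples wrap their own loss objects the same way.
loss_obj = tf.keras.losses.CategoricalCrossentropy()

# tfa.optimizers.AdamW / tfa.optimizers.SGDW -> built-in Keras optimizers,
# which accept decoupled weight decay directly (TF >= 2.11 assumed).
optimizer = tf.keras.optimizers.AdamW(learning_rate=1e-3, weight_decay=1e-4)

# tfa.metrics.MeanMetricWrapper -> tf.keras.metrics.MeanMetricWrapper,
# the same wrapper class exposed by Keras itself.
ce_loss_metric = tf.keras.metrics.MeanMetricWrapper(loss_obj, name="ce_loss")
```

Note that the `moving_average_decay` and `lookahead` options in `build_optimizer` are removed outright rather than replaced, so configurations that relied on `tfa.optimizers.MovingAverage` or `tfa.optimizers.Lookahead` lose that behavior.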