Skip to content

Commit

Permalink
Remove tensorflow_addons (#3025)
Browse files Browse the repository at this point in the history
### Changes

Remove tensorflow_addons dependency.

### Reason for changes

The tensorflow_addons package was deprecated.

### Related tickets

- Ref: 155320
<!--- Post the numerical ID of the ticket, if available -->

### Tests

Current scope
  • Loading branch information
andrey-churkin authored Oct 23, 2024
1 parent efdceca commit d88422a
Show file tree
Hide file tree
Showing 8 changed files with 11 additions and 29 deletions.
1 change: 0 additions & 1 deletion docs/api/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,6 @@ def collect_api_entities() -> APIInfo:
"openvino",
"tensorflow",
"keras",
"tensorflow_addons",
# Need add backend implementation functions to avoid endless loops on registered functions by mock module,
"nncf.tensor.functions.numpy_numeric",
"nncf.tensor.functions.numpy_linalg",
Expand Down
5 changes: 2 additions & 3 deletions examples/tensorflow/classification/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
from pathlib import Path

import tensorflow as tf
import tensorflow_addons as tfa

import nncf
from examples.common.paths import configure_paths
Expand Down Expand Up @@ -199,8 +198,8 @@ def run(config):
metrics = [
tf.keras.metrics.CategoricalAccuracy(name="acc@1"),
tf.keras.metrics.TopKCategoricalAccuracy(k=5, name="acc@5"),
tfa.metrics.MeanMetricWrapper(loss_obj, name="ce_loss"),
tfa.metrics.MeanMetricWrapper(compression_ctrl.loss, name="cr_loss"),
tf.keras.metrics.MeanMetricWrapper(loss_obj, name="ce_loss"),
tf.keras.metrics.MeanMetricWrapper(compression_ctrl.loss, name="cr_loss"),
]

compress_model.compile(
Expand Down
13 changes: 2 additions & 11 deletions examples/tensorflow/common/optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
# limitations under the License.

import tensorflow as tf
import tensorflow_addons as tfa

from examples.tensorflow.common.logger import logger

Expand All @@ -33,7 +32,7 @@ def build_optimizer(config, scheduler):
weight_decay = optimizer_config.get("weight_decay", None)
common_params = {"learning_rate": scheduler, "nesterov": nesterov, "momentum": momentum}
if weight_decay:
optimizer = tfa.optimizers.SGDW(**common_params, weight_decay=weight_decay)
optimizer = tf.keras.optimizers.SGD(**common_params, weight_decay=weight_decay)
else:
optimizer = tf.keras.optimizers.SGD(**common_params)
elif optimizer_type == "rmsprop":
Expand All @@ -59,18 +58,10 @@ def build_optimizer(config, scheduler):
"amsgrad": amsgrad,
}
if weight_decay:
optimizer = tfa.optimizers.AdamW(**common_params, weight_decay=weight_decay)
optimizer = tf.keras.optimizers.AdamW(**common_params, weight_decay=weight_decay)
else:
optimizer = tf.keras.optimizers.Adam(**common_params)
else:
raise ValueError("Unknown optimizer %s" % optimizer_type)

moving_average_decay = optimizer_params.get("moving_average_decay", 0.0)
if moving_average_decay > 0.0:
logger.info("Including moving average decay.")
optimizer = tfa.optimizers.MovingAverage(optimizer, average_decay=moving_average_decay, num_updates=None)
if optimizer_params.get("lookahead", None):
logger.info("Using lookahead optimizer.")
optimizer = tfa.optimizers.Lookahead(optimizer)

return optimizer
2 changes: 0 additions & 2 deletions examples/tensorflow/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@ absl-py==1.0.0
tensorflow
tensorflow_datasets==4.2.0
tensorflow_hub
tensorflow_addons==0.20.0; python_version < '3.9'
tensorflow_addons==0.23.0; python_version >= '3.9'
tensorflow-metadata==1.13.0
opencv-python
pycocotools==2.0.6
1 change: 0 additions & 1 deletion nncf/tensor/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,6 @@ tensor_a[0:2] # Tensor(array([[1],[2]]))
"onnxruntime",
"openvino",
"tensorflow",
"tensorflow_addons",
"nncf.tensor.functions.torch_*",
"nncf.tensor.functions.numpy_*",
"nncf.tensor.functions.<NEW_BACKEND>_*",
Expand Down
11 changes: 5 additions & 6 deletions nncf/tensorflow/accuracy_aware_training/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
import os.path as osp

import tensorflow as tf
import tensorflow_addons as tfa
from tensorflow.keras.optimizers import schedules

from nncf.common.accuracy_aware_training.runner import BaseAccuracyAwareTrainingRunner
Expand All @@ -35,14 +34,14 @@ def validate(self, model):
def reset_training(self):
self.configure_optimizers()

if isinstance(self.optimizer, tfa.optimizers.MultiOptimizer):
optimizers = [optimizer_spec.optimizer for optimizer_spec in self.optimizer.optimizer_specs]
else:
optimizers = self.optimizer if isinstance(self.optimizer, (tuple, list)) else [self.optimizer]
optimizers = self.optimizer if isinstance(self.optimizer, (tuple, list)) else [self.optimizer]

for optimizer in optimizers:
scheduler = optimizer.learning_rate
if isinstance(scheduler, tf.Variable):
# pylint: disable=protected-access
if isinstance(scheduler, tf.Variable) and not isinstance(
optimizer._learning_rate, schedules.LearningRateSchedule
):
scheduler = scheduler * self.base_lr_reduction_factor_during_search
optimizer.learning_rate = scheduler
optimizer.lr = scheduler
Expand Down
2 changes: 0 additions & 2 deletions tests/tensorflow/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
-c ../../constraints.txt
PyYAML
tensorflow_addons==0.20.0; python_version < '3.9'
tensorflow_addons==0.23.0; python_version >= '3.9'
tensorflow-metadata==1.13.0
pytest
pytest-cov
Expand Down
5 changes: 2 additions & 3 deletions tests/tensorflow/sparsity/rb/test_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@

import pytest
import tensorflow as tf
import tensorflow_addons as tfa
from tensorflow.python.framework.config import disable_op_determinism
from tensorflow.python.framework.config import enable_op_determinism

Expand Down Expand Up @@ -191,8 +190,8 @@ def on_epoch_end(self, epoch, logs=None):
metrics = [
tf.keras.metrics.CategoricalAccuracy(name="acc@1"),
tf.keras.metrics.TopKCategoricalAccuracy(k=5, name="acc@5"),
tfa.metrics.MeanMetricWrapper(loss_obj, name="ce_loss"),
tfa.metrics.MeanMetricWrapper(compress_algo.loss, name="cr_loss"),
tf.keras.metrics.MeanMetricWrapper(loss_obj, name="ce_loss"),
tf.keras.metrics.MeanMetricWrapper(compress_algo.loss, name="cr_loss"),
]

compress_model.add_loss(compress_algo.loss)
Expand Down

0 comments on commit d88422a

Please sign in to comment.