diff --git a/optimum/habana/diffusers/pipelines/pipeline_utils.py b/optimum/habana/diffusers/pipelines/pipeline_utils.py
index 0dfe826b68..83a3a04116 100644
--- a/optimum/habana/diffusers/pipelines/pipeline_utils.py
+++ b/optimum/habana/diffusers/pipelines/pipeline_utils.py
@@ -19,7 +19,6 @@
 import inspect
 import os
 import sys
-import tempfile
 from typing import Optional, Union
 
 import torch
@@ -117,14 +116,7 @@ def __init__(
                     )
                     self.gaudi_config.use_torch_autocast = False
                 else:
-                    with tempfile.NamedTemporaryFile() as autocast_bf16_file:
-                        with tempfile.NamedTemporaryFile() as autocast_fp32_file:
-                            self.gaudi_config.write_bf16_fp32_ops_to_text_files(
-                                autocast_bf16_file.name,
-                                autocast_fp32_file.name,
-                            )
-                            os.environ["LOWER_LIST"] = str(autocast_bf16_file)
-                            os.environ["FP32_LIST"] = str(autocast_fp32_file)
+                    self.gaudi_config.declare_autocast_bf16_fp32_ops()
 
             # Workaround for Synapse 1.11 for full bf16 and Torch Autocast
             if bf16_full_eval or self.gaudi_config.use_torch_autocast:
diff --git a/optimum/habana/transformers/gaudi_configuration.py b/optimum/habana/transformers/gaudi_configuration.py
index c67c664216..76638d8e95 100644
--- a/optimum/habana/transformers/gaudi_configuration.py
+++ b/optimum/habana/transformers/gaudi_configuration.py
@@ -92,7 +92,6 @@ def declare_autocast_bf16_fp32_ops(self):
             self.write_bf16_fp32_ops_to_text_files(
                 autocast_bf16_filename,
                 autocast_fp32_filename,
-                autocast=True,
             )
             os.environ["LOWER_LIST"] = autocast_bf16_filename
             os.environ["FP32_LIST"] = autocast_fp32_filename
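
Note on the change: the diff drops the inline `NamedTemporaryFile` block in the pipeline and delegates to `GaudiConfig.declare_autocast_bf16_fp32_ops()`. The removed block appears problematic in two ways: `str(autocast_bf16_file)` yields the file object's repr rather than its path, and the temporary files are deleted as soon as the `with` blocks exit, so `LOWER_LIST`/`FP32_LIST` could end up pointing at nothing. Below is a minimal standalone sketch of the pattern the helper relies on, writing the op lists to files that persist and exporting their paths; the function name, op names, and file locations are illustrative assumptions, not the library's actual implementation.

```python
# Illustrative sketch only: persist bf16/fp32 op lists to files that
# outlive this function, then point the HPU autocast environment
# variables at their paths. Names below are hypothetical examples.
import os
import tempfile


def declare_autocast_ops(bf16_ops, fp32_ops):
    # Use a directory that is NOT removed on scope exit, unlike the
    # removed NamedTemporaryFile approach where the files vanished when
    # the context managers closed.
    tmp_dir = tempfile.mkdtemp()
    bf16_path = os.path.join(tmp_dir, "lower_list.txt")
    fp32_path = os.path.join(tmp_dir, "fp32_list.txt")

    with open(bf16_path, "w") as f:
        f.write("\n".join(bf16_ops))
    with open(fp32_path, "w") as f:
        f.write("\n".join(fp32_ops))

    # SynapseAI reads these variables to decide which ops run in bf16
    # and which stay in fp32 under Torch Autocast.
    os.environ["LOWER_LIST"] = bf16_path
    os.environ["FP32_LIST"] = fp32_path


declare_autocast_ops(["add", "matmul"], ["softmax"])
```

Centralizing this in `declare_autocast_bf16_fp32_ops()` also keeps the pipeline and Trainer code paths consistent, which is presumably why the `autocast=True` argument to `write_bf16_fp32_ops_to_text_files` could be dropped in `gaudi_configuration.py`.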