Use torch for CUDA 11.8 in tests (openvinotoolkit#2149)
vshampor authored Sep 27, 2023
1 parent 304bd1d commit b16d4a1
Showing 2 changed files with 6 additions and 12 deletions.
Makefile (6 changes: 3 additions, 3 deletions)

@@ -117,11 +117,11 @@ test-examples-tensorflow:
 # PyTorch backend
 install-torch-test:
 	pip install -U pip
-	pip install -e .[torch]
-	pip install -r tests/torch/requirements.txt
+	pip install -e .[torch] --index-url https://download.pytorch.org/whl/cu118 --extra-index-url=https://pypi.org/simple # ticket 119128
+	pip install -r tests/torch/requirements.txt --index-url https://download.pytorch.org/whl/cu118 --extra-index-url=https://pypi.org/simple
 	pip install -r tests/cross_fw/install/requirements.txt
 	pip install -r tests/cross_fw/examples/requirements.txt
-	pip install -r examples/torch/requirements.txt
+	pip install -r examples/torch/requirements.txt --index-url https://download.pytorch.org/whl/cu118 --extra-index-url=https://pypi.org/simple
 
 install-torch-dev: install-torch-test install-pre-commit install-pylint
 	pip install -r examples/post_training_quantization/torch/mobilenet_v2/requirements.txt
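
A note on the new flags (explanatory, not part of the commit): --index-url points pip at the PyTorch CUDA 11.8 wheel index so that torch resolves to a cu118 build, while --extra-index-url keeps PyPI available for the other requirements. A minimal sketch for checking which build actually landed in the test environment, assuming a standard torch installation:

    import torch

    # A cu118 wheel carries a "+cu118" local version suffix and reports CUDA 11.8;
    # a CPU-only wheel reports None for torch.version.cuda.
    print(torch.__version__)   # e.g. "2.0.1+cu118"
    print(torch.version.cuda)  # e.g. "11.8"
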
tests/torch/test_sanity_sample.py (12 changes: 3 additions, 9 deletions)
@@ -174,12 +174,6 @@ def set_num_threads_locally(n=1):
         torch.set_num_threads(old_n)
 
 
-def _run_with_xfail_119128(runner: Command):
-    returncode = runner.run(assert_returncode_zero=False)
-    if returncode == 139:
-        pytest.xfail("Bug 119128: sporadic segment fault on backward")
-
-
 @pytest.mark.nightly
 class TestSanitySample:
     @staticmethod
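
Aside on the removed helper (an explanatory sketch, not from the commit): exit code 139 is the POSIX convention for a child process killed by a segmentation fault, i.e. 128 plus the SIGSEGV signal number, which is why the helper converted that return code into an xfail for ticket 119128:

    import signal

    # 128 + SIGSEGV (signal 11) == 139, the exit code of a segfaulted subprocess.
    assert 128 + signal.SIGSEGV == 139
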
@@ -289,7 +283,7 @@ def test_pretrained_model_train(config, tmp_path, multiprocessing_distributed, c
         )
 
         runner = Command(create_command_line(args, config["sample_type"]), env=ROOT_PYTHONPATH_ENV)
-        _run_with_xfail_119128(runner)
+        runner.run(assert_returncode_zero=False)
         last_checkpoint_path = os.path.join(checkpoint_save_dir, get_run_name(config_factory.config) + "_last.pth")
         assert os.path.exists(last_checkpoint_path)
         if "compression" in config["sample_config"]:
@@ -374,7 +368,7 @@ def test_resume(request, config, tmp_path, multiprocessing_distributed, case_com
             args["--multiprocessing-distributed"] = True
 
         runner = Command(create_command_line(args, config["sample_type"]), env=ROOT_PYTHONPATH_ENV)
-        _run_with_xfail_119128(runner)
+        runner.run(assert_returncode_zero=False)
         last_checkpoint_path = os.path.join(checkpoint_save_dir, get_run_name(config_factory.config) + "_last.pth")
         assert os.path.exists(last_checkpoint_path)
         if "compression" in config["sample_config"]:
@@ -590,7 +584,7 @@ def test_accuracy_aware_training_pipeline(accuracy_aware_config, tmp_path, multi
             args["--multiprocessing-distributed"] = True
 
         runner = Command(create_command_line(args, accuracy_aware_config["sample_type"]), env=ROOT_PYTHONPATH_ENV)
-        _run_with_xfail_119128(runner)
+        runner.run(assert_returncode_zero=False)
 
         checkpoint_save_dir = log_dir / get_run_name(config_factory.config)
         aa_checkpoint_path = get_accuracy_aware_checkpoint_dir_path(checkpoint_save_dir)
