From d4278cbe7657b6551d2ecda813efc7e7dab7e11e Mon Sep 17 00:00:00 2001
From: Chris van Run
Date: Tue, 27 Aug 2024 21:10:58 +0200
Subject: [PATCH] Fix deprecation warnings in tests

---
 gcapi/client.py                  | 17 ++++++++---------
 tests/async_integration_tests.py | 14 +++++++-------
 tests/integration_tests.py       | 16 ++++++++--------
 3 files changed, 23 insertions(+), 24 deletions(-)

diff --git a/gcapi/client.py b/gcapi/client.py
index 3925cc0..414f609 100644
--- a/gcapi/client.py
+++ b/gcapi/client.py
@@ -213,13 +213,12 @@ class UploadsAPI(APIBase[gcapi.models.UserUpload]):
     max_retries = 10

     def create(self, *, filename):
-        return (
-            yield self.yield_request(
-                method="POST",
-                path=self.base_path,
-                json={"filename": str(filename)},
-            )
+        result = yield self.yield_request(
+            method="POST",
+            path=self.base_path,
+            json={"filename": str(filename)},
         )
+        return self.model(**result)

     def generate_presigned_urls(self, *, pk, s3_upload_id, part_numbers):
         url = urljoin(
@@ -254,8 +253,8 @@ def list_parts(self, *, pk, s3_upload_id):

     def upload_fileobj(self, *, fileobj, filename):
         user_upload = yield from self.create(filename=filename)
-        pk = user_upload["pk"]
-        s3_upload_id = user_upload["s3_upload_id"]
+        pk = user_upload.pk
+        s3_upload_id = user_upload.s3_upload_id

         try:
             parts = yield from self._put_fileobj(
@@ -507,7 +506,7 @@ def _upload_image_files(self, *, files, **kwargs):

         return (
             yield from self.__org_api_meta.raw_image_upload_sessions.create(
-                uploads=[u["api_url"] for u in uploads], **kwargs
+                uploads=[u.api_url for u in uploads], **kwargs
             )
         )

diff --git a/tests/async_integration_tests.py b/tests/async_integration_tests.py
index 97c5b22..7c6f731 100644
--- a/tests/async_integration_tests.py
+++ b/tests/async_integration_tests.py
@@ -167,7 +167,7 @@ async def test_upload_cases_to_archive(
             files=[TESTDATA / f for f in files],
         )

-        us = await get_upload_session(c, us["pk"])
+        us = await get_upload_session(c, us.pk)

         # Check that only one image was created
         assert len(us.image_set) == 1
@@ -255,7 +255,7 @@ async def test_upload_cases_to_archive_item_with_existing_interface(
             files=[TESTDATA / "image10x10x101.mha"],
         )

-        us = await get_upload_session(c, us["pk"])
+        us = await get_upload_session(c, us.pk)

         # Check that only one image was created
         assert len(us.image_set) == 1
@@ -298,7 +298,7 @@ async def test_upload_cases_to_archive_item_with_new_interface(
             files=[TESTDATA / "image10x10x101.mha"],
         )

-        us = await get_upload_session(c, us["pk"])
+        us = await get_upload_session(c, us.pk)

         # Check that only one image was created
         assert len(us.image_set) == 1
@@ -325,7 +325,7 @@ async def test_download_cases(local_grand_challenge, files, tmpdir):
             files=[TESTDATA / f for f in files],
         )

-        us = await get_upload_session(c, us["pk"])
+        us = await get_upload_session(c, us.pk)

         # Check that we can download the uploaded image
         tmpdir = Path(tmpdir)
@@ -378,10 +378,10 @@ async def run_job():
         # algorithm might not be ready yet
         job = await run_job()

-        assert job["status"] == "Queued"
-        assert len(job["inputs"]) == 1
+        assert job.status == "Queued"
+        assert len(job.inputs) == 1

-        job = await c.algorithm_jobs.detail(job["pk"])
+        job = await c.algorithm_jobs.detail(job.pk)
         assert job.status in {"Queued", "Started"}


diff --git a/tests/integration_tests.py b/tests/integration_tests.py
index 733d0f9..d868120 100644
--- a/tests/integration_tests.py
+++ b/tests/integration_tests.py
@@ -147,7 +147,7 @@ def test_upload_cases_to_archive(local_grand_challenge, files, interface):
         files=[TESTDATA / f for f in files],
     )

-    us = get_upload_session(c, us["pk"])
+    us = get_upload_session(c, us.pk)

     # Check that only one image was created
     assert len(us.image_set) == 1
@@ -175,7 +175,7 @@ def test_upload_cases_to_archive(local_grand_challenge, files, interface):
     )

     # And that we can download it
-    response = c(url=image["files"][0]["file"], follow_redirects=True)
+    response = c(url=image.files[0].file, follow_redirects=True)
     assert response.status_code == 200


@@ -224,7 +224,7 @@ def test_upload_cases_to_archive_item_with_existing_interface(
         files=[TESTDATA / "image10x10x101.mha"],
     )

-    us = get_upload_session(c, us["pk"])
+    us = get_upload_session(c, us.pk)

     # Check that only one image was created
     assert len(us.image_set) == 1
@@ -255,7 +255,7 @@ def test_upload_cases_to_archive_item_with_new_interface(
         files=[TESTDATA / "image10x10x101.mha"],
    )

-    us = get_upload_session(c, us["pk"])
+    us = get_upload_session(c, us.pk)

     # Check that only one image was created
     assert len(us.image_set) == 1
@@ -280,7 +280,7 @@ def test_download_cases(local_grand_challenge, files, tmpdir):
         files=[TESTDATA / f for f in files],
     )

-    us = get_upload_session(c, us["pk"])
+    us = get_upload_session(c, us.pk)

     # Check that we can download the uploaded image
     tmpdir = Path(tmpdir)
@@ -336,9 +336,9 @@ def run_job():
     # algorithm might not be ready yet
     job = run_job()

-    assert job["status"] == "Queued"
-    assert len(job["inputs"]) == 1
-    job = c.algorithm_jobs.detail(job["pk"])
+    assert job.status == "Queued"
+    assert len(job.inputs) == 1
+    job = c.algorithm_jobs.detail(job.pk)
     assert job.status in {"Queued", "Started"}

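
The pattern throughout the patch is the same: UploadsAPI.create now parses the JSON response into the corresponding model via self.model(**result), so call sites read attributes (us.pk, job.status, u.api_url) instead of indexing dicts (us["pk"], job["status"]). A minimal sketch of the difference for callers, assuming the generated models are pydantic-style; the UserUpload class below is an illustrative stand-in, not the real gcapi.models.UserUpload:

from pydantic import BaseModel


class UserUpload(BaseModel):
    # Illustrative stand-in; field names taken from the attributes
    # used in the patch above.
    pk: str
    s3_upload_id: str


raw = {"pk": "1234", "s3_upload_id": "abcd"}  # raw JSON payload from the API
upload = UserUpload(**raw)  # what create() now yields via self.model(**result)

assert upload.pk == raw["pk"]  # attribute access replaces upload["pk"]
assert upload.s3_upload_id == raw["s3_upload_id"]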