diff --git a/.github/workflows/stage.yaml b/.github/workflows/stage.yaml index ab4f7d8d..f1a7582f 100644 --- a/.github/workflows/stage.yaml +++ b/.github/workflows/stage.yaml @@ -19,6 +19,7 @@ jobs: uses: ./.github/workflows/stage_call.yaml with: resource_id: ${{inputs.resource_id}} + package_url: ${{inputs.package_url}} S3_HOST: ${{vars.S3_HOST}} S3_BUCKET: ${{vars.S3_BUCKET}} S3_FOLDER: ${{vars.S3_FOLDER}} diff --git a/.github/workflows/stage_call.yaml b/.github/workflows/stage_call.yaml index 9adb295a..bad9d53b 100644 --- a/.github/workflows/stage_call.yaml +++ b/.github/workflows/stage_call.yaml @@ -44,7 +44,7 @@ jobs: python-version: "3.12" cache: "pip" # caching pip dependencies - run: pip install -r requirements.txt - - name: stage + - id: stage run: | python scripts/stage.py "${{ inputs.resource_id }}" "${{ inputs.package_url }}" @@ -90,7 +90,7 @@ jobs: with: python-version: "3.12" cache: "pip" # caching pip dependencies - - run: pip install -r scripts/requirements.txt + - run: pip install -r requirements.txt - run: | python scripts/conclude.py "${{ inputs.resource_id }}" "${{needs.stage.outputs.version}}" diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 183d1be1..a909b9a1 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -4,6 +4,13 @@ on: push concurrency: test +env: + S3_HOST: ${{vars.S3_HOST}} + S3_BUCKET: ${{vars.S3_TEST_BUCKET}} + S3_FOLDER: ${{vars.S3_TEST_FOLDER}} + S3_ACCESS_KEY_ID: ${{secrets.S3_ACCESS_KEY_ID}} + S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}} + jobs: test-stage-wf: uses: ./.github/workflows/stage_call.yaml @@ -23,7 +30,7 @@ jobs: with: python-version: "3.12" cache: "pip" # caching pip dependencies - - run: pip install -r .github/scripts/requirements.txt + - run: pip install -r requirements.txt - run: black . 
- run: pyright -p pyproject.toml - run: pytest diff --git a/requirements.txt b/requirements.txt index 454362b4..897afc6f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -bioimageio.spec @ git+https://github.com/bioimage-io/spec-bioimage-io@0b707b83b061da7584e554cedbf0c6d725980619 # TODO: chenage to released version -bioimageio.core @ git+https://github.com/bioimage-io/core-bioimage-io-python@154a7d6f3c585045bb23a5a0fcfc6f418355bc1a # TODO: chenage to released version +bioimageio.spec @ git+https://github.com/bioimage-io/spec-bioimage-io@19105665ad779014e03c7b311c0b4003ab08f752 # TODO: change to released version +bioimageio.core @ git+https://github.com/bioimage-io/core-bioimage-io-python@3a7875b5debc2d52b2fc87f6579afe217e1c7280 # TODO: change to released version black==24.2.0 loguru==0.7.2 minio==7.2.3 diff --git a/scripts/conclude.py b/scripts/conclude.py new file mode 100644 index 00000000..b8f85558 --- /dev/null +++ b/scripts/conclude.py @@ -0,0 +1,18 @@ +from typer import run +from utils.remote_resource import StagedVersion +from utils.s3_client import Client + + +def conclude( + resource_id: str, + version: int, +): + staged = StagedVersion(client=Client(), id=resource_id, version=version) + staged.set_status( + "awaiting review", + description="Thank you for your contribution! 
Our bioimage.io maintainers will take a look soon.", + ) + + +if __name__ == "__main__": + run(conclude) diff --git a/scripts/upload_model_to_zenodo.py b/scripts/upload_model_to_zenodo.py index fd89b449..0f715b2d 100644 --- a/scripts/upload_model_to_zenodo.py +++ b/scripts/upload_model_to_zenodo.py @@ -1,3 +1,4 @@ +# type: ignore import argparse import logging import os @@ -8,14 +9,13 @@ from typing import Optional from urllib.parse import quote_plus, urljoin, urlparse -import requests # type: ignore -import spdx_license_list # type: ignore -from loguru import logger # type: ignore +import requests +import spdx_license_list +from loguru import logger from packaging.version import parse as parse_version -from ruyaml import YAML # type: ignore -from s3_client import create_client, version_from_resource_path_or_s3 +from ruyaml import YAML -from scripts.conclude import update_status +# from utils.s3_client import create_client, version_from_resource_path_or_s3 yaml = YAML(typ="safe") diff --git a/scripts/utils/remote_resource.py b/scripts/utils/remote_resource.py index 132c6f70..c742408b 100644 --- a/scripts/utils/remote_resource.py +++ b/scripts/utils/remote_resource.py @@ -53,7 +53,7 @@ def stage_new_version(self, package_url: str) -> StagedVersion: try: remotezip = urllib.request.urlopen(package_url) except Exception: - logger.error("failed to open %s", package_url) + logger.error("failed to open {}", package_url) raise zipinmemory = io.BytesIO(remotezip.read()) @@ -185,16 +185,17 @@ class StagedVersion(_RemoteResourceVersion): version_prefix: ClassVar[str] = "staged/" def publish(self) -> PublishedVersion: - logger.debug("Publishing {}", self.folder) # get next version and update versions.json versions_path = f"{self.id}/versions.json" versions_data = self.client.load_file(versions_path) if versions_data is None: - versions: dict[str, Any] = {"1": {}} + versions: dict[str, Any] = {} + next_version = 1 else: versions = json.loads(versions_data) + next_version = 
max(map(int, versions)) + 1 - next_version = max(map(int, versions)) + 1 + logger.debug("Publishing {} as version {}", self.folder, next_version) assert next_version not in versions, (next_version, versions) diff --git a/scripts/utils/s3_client.py b/scripts/utils/s3_client.py index 9288b41e..6daa88a4 100644 --- a/scripts/utils/s3_client.py +++ b/scripts/utils/s3_client.py @@ -54,14 +54,15 @@ def put( if length == -1: part_size = 10 * 1024 * 1024 - path = f"{self.prefix}/{path}" + prefixed_path = f"{self.prefix}/{path}" _ = self._client.put_object( self.bucket, - path, + prefixed_path, file_object, length=length, part_size=part_size, ) + logger.info("Uploaded {}", self.get_file_url(path)) def put_json(self, path: str, json_value: Any): data = json.dumps(json_value).encode() diff --git a/scripts/utils/validate_format.py b/scripts/utils/validate_format.py index 2d1d603e..a750e95e 100644 --- a/scripts/utils/validate_format.py +++ b/scripts/utils/validate_format.py @@ -7,7 +7,7 @@ import pooch from bioimageio.spec import InvalidDescr, ResourceDescr, load_description -from bioimageio.spec.model import AnyModelDescr, v0_4, v0_5 +from bioimageio.spec.model import v0_4, v0_5 from packaging.version import Version from ruyaml import YAML @@ -223,7 +223,7 @@ def ensure_valid_conda_env_name(name: str) -> str: def prepare_dynamic_test_cases(rd: ResourceDescr) -> list[dict[str, str]]: validation_cases: list[dict[str, str]] = [] # construct test cases based on resource type - if isinstance(rd, AnyModelDescr): + if isinstance(rd, (v0_4.ModelDescr, v0_5.ModelDescr)): # generate validation cases per weight format for wf, entry in rd.weights: # we skip the keras validation for now, see diff --git a/tests/conftest.py b/tests/conftest.py index 2db59e62..ebb55f26 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,8 +16,8 @@ def client(): bucket=os.environ["S3_TEST_BUCKET"], prefix=os.environ["S3_TEST_FOLDER"] + "/pytest", ) - yield cl cl.rm_dir("") # wipe s3 test folder + 
yield cl @pytest.fixture(scope="session") diff --git a/tests/test_scripts/test_utils/test_remote_resource.py b/tests/test_scripts/test_utils/test_remote_resource.py index e462e9f3..0c94f47a 100644 --- a/tests/test_scripts/test_utils/test_remote_resource.py +++ b/tests/test_scripts/test_utils/test_remote_resource.py @@ -1,12 +1,19 @@ -def test_lifecycle(package_url: str, package_id: str, s3_test_folder_url: str): +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from scripts.utils.s3_client import Client + + +def test_lifecycle( + client: "Client", package_url: str, package_id: str, s3_test_folder_url: str +): from scripts.utils.remote_resource import ( PublishedVersion, RemoteResource, StagedVersion, ) - from scripts.utils.s3_client import Client - resource = RemoteResource(client=Client(), id=package_id) + resource = RemoteResource(client=client, id=package_id) staged = resource.stage_new_version(package_url) assert isinstance(staged, StagedVersion) staged_rdf_url = staged.get_rdf_url() @@ -18,5 +25,5 @@ def test_lifecycle(package_url: str, package_id: str, s3_test_folder_url: str): assert isinstance(published, PublishedVersion) published_rdf_url = published.get_rdf_url() assert ( - published_rdf_url == f"{s3_test_folder_url}frank-water-buffalo/3/files/rdf.yaml" + published_rdf_url == f"{s3_test_folder_url}frank-water-buffalo/1/files/rdf.yaml" )