diff --git a/.github/workflows/backup.yaml b/.github/workflows/backup.yaml new file mode 100644 index 00000000..2c54c03d --- /dev/null +++ b/.github/workflows/backup.yaml @@ -0,0 +1,26 @@ +name: backup + +on: + schedule: + - cron: "0 0 * * 0" # every sunday at midnight + +concurrency: ${{vars.ZENODO_URL}} + +env: + S3_HOST: ${{vars.S3_HOST}} + S3_BUCKET: ${{vars.S3_BUCKET}} + S3_FOLDER: ${{vars.S3_FOLDER}} + ZENODO_URL: ${{vars.ZENODO_URL}} + S3_ACCESS_KEY_ID: ${{secrets.S3_ACCESS_KEY_ID}} + S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}} + ZENODO_API_ACCESS_TOKEN: ${{secrets.ZENODO_API_ACCESS_TOKEN}} + +jobs: + call: + uses: ./.github/workflows/backup_call.yaml + with: + S3_HOST: ${{vars.S3_HOST}} + S3_BUCKET: ${{vars.S3_BUCKET}} + S3_FOLDER: ${{vars.S3_FOLDER}} + ZENODO_URL: ${{vars.ZENODO_URL}} + secrets: inherit diff --git a/.github/workflows/backup_call.yaml b/.github/workflows/backup_call.yaml new file mode 100644 index 00000000..871508aa --- /dev/null +++ b/.github/workflows/backup_call.yaml @@ -0,0 +1,41 @@ +name: backup + +on: + workflow_call: + inputs: + S3_HOST: + required: true + type: string + S3_BUCKET: + required: true + type: string + S3_FOLDER: + required: true + type: string + ZENODO_URL: + required: true + type: string + +concurrency: ${{inputs.ZENODO_URL}}-call + +env: + S3_HOST: ${{inputs.S3_HOST}} + S3_BUCKET: ${{inputs.S3_BUCKET}} + S3_FOLDER: ${{inputs.S3_FOLDER}} + ZENODO_URL: ${{inputs.ZENODO_URL}} + S3_ACCESS_KEY_ID: ${{secrets.S3_ACCESS_KEY_ID}} + S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}} + ZENODO_API_ACCESS_TOKEN: ${{secrets.ZENODO_API_ACCESS_TOKEN}} + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + cache: "pip" # caching pip dependencies + - run: pip install -r requirements.txt + - run: | + python scripts/backup.py diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml new file mode 100644 index 
00000000..5c8fac2c --- /dev/null +++ b/.github/workflows/publish.yaml @@ -0,0 +1,36 @@ +name: publish + +on: + workflow_dispatch: + inputs: + resource_id: + description: "Bioimageio ID of the resource - to be used to access the resource on S3" + required: true + type: string + stage_nr: + description: stage nr to publish + required: true + type: number + +concurrency: ${{inputs.resource_id}} + +env: + S3_HOST: ${{vars.S3_HOST}} + S3_BUCKET: ${{vars.S3_BUCKET}} + S3_FOLDER: ${{vars.S3_FOLDER}} + S3_ACCESS_KEY_ID: ${{secrets.S3_ACCESS_KEY_ID}} + S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}} + ZENODO_URL: ${{vars.ZENODO_URL}} + ZENODO_API_ACCESS_TOKEN: ${{secrets.ZENODO_API_ACCESS_TOKEN}} + +jobs: + call: + uses: ./.github/workflows/publish_call.yaml + with: + resource_id: ${{inputs.resource_id}} + stage_nr: ${{inputs.stage_nr}} + S3_HOST: ${{vars.S3_HOST}} + S3_BUCKET: ${{vars.S3_BUCKET}} + S3_FOLDER: ${{vars.S3_FOLDER}} + ZENODO_URL: ${{vars.ZENODO_URL}} + secrets: inherit diff --git a/.github/workflows/publish_call.yaml b/.github/workflows/publish_call.yaml new file mode 100644 index 00000000..f0b06d71 --- /dev/null +++ b/.github/workflows/publish_call.yaml @@ -0,0 +1,54 @@ +name: publish + +on: + workflow_call: + inputs: + resource_id: + description: "Bioimageio ID of the resource - to be used to access the resource on S3" + required: true + type: string + stage_nr: + description: stage nr to publish + required: true + type: number + S3_HOST: + required: true + type: string + S3_BUCKET: + required: true + type: string + S3_FOLDER: + required: true + type: string + ZENODO_URL: + required: true + type: string + +concurrency: ${{inputs.resource_id}}-call + +env: + S3_HOST: ${{inputs.S3_HOST}} + S3_BUCKET: ${{inputs.S3_BUCKET}} + S3_FOLDER: ${{inputs.S3_FOLDER}} + ZENODO_URL: ${{inputs.ZENODO_URL}} + S3_ACCESS_KEY_ID: ${{secrets.S3_ACCESS_KEY_ID}} + S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}} + ZENODO_API_ACCESS_TOKEN: 
${{secrets.ZENODO_API_ACCESS_TOKEN}} + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + cache: "pip" # caching pip dependencies + - run: pip install -r requirements.txt + - run: | + python scripts/publish.py "${{ inputs.resource_id }}" "${{ inputs.stage_nr }}" + # - name: Publish to Zenodo + # run: | + # python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Publishing to Zenodo" "5" + # python .github/scripts/upload_model_to_zenodo.py --resource_path "${{inputs.resource_path}}" + # python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Publishing complete" "6" diff --git a/.github/workflows/stage_call.yaml b/.github/workflows/stage_call.yaml index bad9d53b..62be614c 100644 --- a/.github/workflows/stage_call.yaml +++ b/.github/workflows/stage_call.yaml @@ -21,7 +21,7 @@ on: required: true type: string -concurrency: ${{inputs.resource_id}} +concurrency: ${{inputs.resource_id}}-call env: S3_HOST: ${{ inputs.S3_HOST }} @@ -70,14 +70,14 @@ jobs: conda-forge::bioimageio.spec minio loguru - continue-on-error: true # we inspect this step's outcome in test_dynamically.py + continue-on-error: true # we inspect this step's outcome in run_dynamic_tests.py timeout-minutes: 60 - name: install minimal script dependencies if val env failed if: ${{ steps.create_env.outcome != 'success' }} run: pip install typer bioimageio.spec minio loguru - name: dynamic validation shell: bash -l {0} - run: python scripts/test_dynamically.py "${{inputs.resource_id}}" "${{needs.stage.outputs.version}}" "${{ matrix.weight_format }}" "${{ steps.create_env.outcome }}" + run: python scripts/run_dynamic_tests.py "${{inputs.resource_id}}" "${{needs.stage.outputs.version}}" "${{ matrix.weight_format }}" "${{ steps.create_env.outcome }}" timeout-minutes: 60 conclude: diff --git a/pyproject.toml b/pyproject.toml index da9cb67a..5c7372e3 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -20,4 +20,4 @@ useLibraryCodeForTypes = true [tool.pytest.ini_options] addopts = "--capture=no --doctest-modules --failed-first" -testpaths = ["scripts"] +testpaths = ["scripts", "tests"] diff --git a/scripts/backup.py b/scripts/backup.py new file mode 100644 index 00000000..9b5cb991 --- /dev/null +++ b/scripts/backup.py @@ -0,0 +1,25 @@ +import os + +import typer +from dotenv import load_dotenv +from loguru import logger +from utils.s3_client import Client + +_ = load_dotenv() + + +def backup(): + """backup collection + + Returns: + list of folders and file names backed up + """ + client = Client() + content_to_backup = list(client.ls("")) + destination = os.environ["ZENODO_URL"] + logger.info("Backup to '{}': {}", destination, content_to_backup) + return content_to_backup + + +if __name__ == "__main__": + typer.run(backup) diff --git a/scripts/publish.py b/scripts/publish.py new file mode 100644 index 00000000..4e70106c --- /dev/null +++ b/scripts/publish.py @@ -0,0 +1,13 @@ +import typer +from utils.remote_resource import PublishedVersion, StagedVersion +from utils.s3_client import Client + + +def publish(resource_id: str, stage_nr: int): + staged = StagedVersion(client=Client(), id=resource_id, version=stage_nr) + published = staged.publish() + assert isinstance(published, PublishedVersion) + + +if __name__ == "__main__": + typer.run(publish) diff --git a/scripts/test_dynamically.py b/scripts/run_dynamic_tests.py similarity index 98% rename from scripts/test_dynamically.py rename to scripts/run_dynamic_tests.py index 37523a95..f3fea8b7 100644 --- a/scripts/test_dynamically.py +++ b/scripts/run_dynamic_tests.py @@ -43,7 +43,7 @@ def get_summary_detail_from_exception(name: str, exception: Exception): ) -def test_dynamically( +def run_dynamic_tests( resource_id: str, version: int, weight_format: Optional[WeightsFormat] = typer.Argument( @@ -127,4 +127,4 @@ def test_dynamically( if __name__ == "__main__": - typer.run(test_dynamically) + 
typer.run(run_dynamic_tests) diff --git a/tests/test_scripts/test_utils/test_remote_resource.py b/tests/test_scripts/test_utils/test_remote_resource.py index 0c94f47a..76b03194 100644 --- a/tests/test_scripts/test_utils/test_remote_resource.py +++ b/tests/test_scripts/test_utils/test_remote_resource.py @@ -7,6 +7,7 @@ def test_lifecycle( client: "Client", package_url: str, package_id: str, s3_test_folder_url: str ): + from scripts.backup import backup from scripts.utils.remote_resource import ( PublishedVersion, RemoteResource, @@ -27,3 +28,6 @@ def test_lifecycle( assert ( published_rdf_url == f"{s3_test_folder_url}frank-water-buffalo/1/files/rdf.yaml" ) + + backed_up = backup() + assert backed_up == ["frank-water-buffalo"]