Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add publish workflow #3

Merged
merged 7 commits into from
Feb 22, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 26 additions & 0 deletions .github/workflows/backup.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Scheduled backup entry point. Runs weekly and forwards repository
# variables/secrets to the reusable backup_call workflow, which does the work.
name: backup

on:
  schedule:
    - cron: "0 0 * * 0" # every sunday at midnight

# Serialize backups per Zenodo target: at most one backup run for a given
# ZENODO_URL at a time.
concurrency: ${{vars.ZENODO_URL}}

# NOTE(review): this env block appears unused here — the only job calls the
# reusable workflow, which defines its own env from `with:` inputs. Confirm
# before removing.
env:
  S3_HOST: ${{vars.S3_HOST}}
  S3_BUCKET: ${{vars.S3_BUCKET}}
  S3_FOLDER: ${{vars.S3_FOLDER}}
  ZENODO_URL: ${{vars.ZENODO_URL}}
  S3_ACCESS_KEY_ID: ${{secrets.S3_ACCESS_KEY_ID}}
  S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}}
  ZENODO_API_ACCESS_TOKEN: ${{secrets.ZENODO_API_ACCESS_TOKEN}}

jobs:
  call:
    # Delegate to the reusable backup workflow; secrets are passed implicitly.
    uses: ./.github/workflows/backup_call.yaml
    with:
      S3_HOST: ${{vars.S3_HOST}}
      S3_BUCKET: ${{vars.S3_BUCKET}}
      S3_FOLDER: ${{vars.S3_FOLDER}}
      ZENODO_URL: ${{vars.ZENODO_URL}}
    secrets: inherit # forwards S3_* and ZENODO_* secrets to the called workflow
41 changes: 41 additions & 0 deletions .github/workflows/backup_call.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# Reusable backup workflow: lists the collection on S3 and backs it up to the
# Zenodo instance identified by ZENODO_URL. Called by backup.yaml on a schedule.
name: backup

on:
  workflow_call:
    inputs:
      S3_HOST:
        required: true
        type: string
      S3_BUCKET:
        required: true
        type: string
      S3_FOLDER:
        required: true
        type: string
      ZENODO_URL:
        required: true
        type: string

# FIX: the original `${{ZENODO_URL}}-call` used a bare name with no context
# prefix, which is not a valid GitHub Actions expression. The value comes in
# as a workflow_call input, so reference the `inputs` context explicitly.
concurrency: ${{ inputs.ZENODO_URL }}-call

# Environment consumed by scripts/backup.py (reads ZENODO_URL and S3_* vars).
env:
  S3_HOST: ${{ inputs.S3_HOST }}
  S3_BUCKET: ${{ inputs.S3_BUCKET }}
  S3_FOLDER: ${{ inputs.S3_FOLDER }}
  ZENODO_URL: ${{ inputs.ZENODO_URL }}
  S3_ACCESS_KEY_ID: ${{ secrets.S3_ACCESS_KEY_ID }}
  S3_SECRET_ACCESS_KEY: ${{ secrets.S3_SECRET_ACCESS_KEY }}
  ZENODO_API_ACCESS_TOKEN: ${{ secrets.ZENODO_API_ACCESS_TOKEN }}

jobs:
  # Renamed from `publish` (copy-paste from publish_call.yaml) to match what
  # this job actually does. NOTE(review): if a branch-protection required
  # status check referenced the old job id, update it accordingly.
  backup:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
          cache: "pip" # caching pip dependencies
      - run: pip install -r requirements.txt
      - run: python scripts/backup.py
36 changes: 36 additions & 0 deletions .github/workflows/publish.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Manual publish entry point: an operator dispatches this with a resource id
# and the stage number to promote; it delegates to the reusable
# publish_call workflow.
name: publish

on:
  workflow_dispatch:
    inputs:
      resource_id:
        description: "Bioimageio ID of the resource - to be used to access the resource on S3"
        required: true
        type: string
      stage_nr:
        description: stage nr to publish
        required: true
        type: number

# Serialize runs per resource: only one publish of a given resource at a time.
concurrency: ${{inputs.resource_id}}

# NOTE(review): this env block appears unused here — the only job calls the
# reusable workflow, which builds its own env from `with:` inputs. Confirm
# before removing.
env:
  S3_HOST: ${{vars.S3_HOST}}
  S3_BUCKET: ${{vars.S3_BUCKET}}
  S3_FOLDER: ${{vars.S3_FOLDER}}
  S3_ACCESS_KEY_ID: ${{secrets.S3_ACCESS_KEY_ID}}
  S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}}
  ZENODO_URL: ${{vars.ZENODO_URL}}
  ZENODO_API_ACCESS_TOKEN: ${{secrets.ZENODO_API_ACCESS_TOKEN}}

jobs:
  call:
    # Delegate to the reusable publish workflow; secrets are passed implicitly.
    uses: ./.github/workflows/publish_call.yaml
    with:
      resource_id: ${{inputs.resource_id}}
      stage_nr: ${{inputs.stage_nr}}
      S3_HOST: ${{vars.S3_HOST}}
      S3_BUCKET: ${{vars.S3_BUCKET}}
      S3_FOLDER: ${{vars.S3_FOLDER}}
      ZENODO_URL: ${{vars.ZENODO_URL}}
    secrets: inherit # forwards S3_* and ZENODO_* secrets to the called workflow
54 changes: 54 additions & 0 deletions .github/workflows/publish_call.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
# Reusable publish workflow: promotes a staged resource version to published
# by running scripts/publish.py. Called by publish.yaml (workflow_dispatch).
name: publish

on:
  workflow_call:
    inputs:
      resource_id:
        description: "Bioimageio ID of the resource - to be used to access the resource on S3"
        required: true
        type: string
      stage_nr:
        description: stage nr to publish
        required: true
        type: number
      S3_HOST:
        required: true
        type: string
      S3_BUCKET:
        required: true
        type: string
      S3_FOLDER:
        required: true
        type: string
      ZENODO_URL:
        required: true
        type: string

# `-call` suffix keeps this group distinct from the caller workflow's
# concurrency group (same pattern as stage_call.yaml).
concurrency: ${{inputs.resource_id}}-call

# Environment consumed by scripts/publish.py and its S3/Zenodo clients.
env:
  S3_HOST: ${{inputs.S3_HOST}}
  S3_BUCKET: ${{inputs.S3_BUCKET}}
  S3_FOLDER: ${{inputs.S3_FOLDER}}
  ZENODO_URL: ${{inputs.ZENODO_URL}}
  S3_ACCESS_KEY_ID: ${{secrets.S3_ACCESS_KEY_ID}}
  S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}}
  ZENODO_API_ACCESS_TOKEN: ${{secrets.ZENODO_API_ACCESS_TOKEN}}

jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
          cache: "pip" # caching pip dependencies
      - run: pip install -r requirements.txt
      - run: |
          python scripts/publish.py "${{ inputs.resource_id }}" "${{ inputs.stage_nr }}"
      # NOTE(review): the disabled steps below reference `inputs.resource_path`,
      # which is not an input of this workflow — fix before re-enabling.
      # - name: Publish to Zenodo
      #   run: |
      #     python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Publishing to Zenodo" "5"
      #     python .github/scripts/upload_model_to_zenodo.py --resource_path "${{inputs.resource_path}}"
      #     python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Publishing complete" "6"
6 changes: 3 additions & 3 deletions .github/workflows/stage_call.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ on:
required: true
type: string

concurrency: ${{inputs.resource_id}}
concurrency: ${{inputs.resource_id}}-call

env:
S3_HOST: ${{ inputs.S3_HOST }}
Expand Down Expand Up @@ -70,14 +70,14 @@ jobs:
conda-forge::bioimageio.spec
minio
loguru
continue-on-error: true # we inspect this step's outcome in test_dynamically.py
continue-on-error: true # we inspect this step's outcome in run_dynamic_tests.py
timeout-minutes: 60
- name: install minimal script dependencies if val env failed
if: ${{ steps.create_env.outcome != 'success' }}
run: pip install typer bioimageio.spec minio loguru
- name: dynamic validation
shell: bash -l {0}
run: python scripts/test_dynamically.py "${{inputs.resource_id}}" "${{needs.stage.outputs.version}}" "${{ matrix.weight_format }}" "${{ steps.create_env.outcome }}"
run: python scripts/run_dynamic_tests.py "${{inputs.resource_id}}" "${{needs.stage.outputs.version}}" "${{ matrix.weight_format }}" "${{ steps.create_env.outcome }}"
timeout-minutes: 60

conclude:
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,4 @@ useLibraryCodeForTypes = true

[tool.pytest.ini_options]
addopts = "--capture=no --doctest-modules --failed-first"
testpaths = ["scripts"]
testpaths = ["scripts", "tests"]
25 changes: 25 additions & 0 deletions scripts/backup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import os

import typer
from dotenv import load_dotenv
from loguru import logger
from utils.s3_client import Client

_ = load_dotenv()


def backup():
    """Back up the collection to Zenodo.

    Lists the top-level content of the configured S3 location and logs the
    intended backup destination taken from the ``ZENODO_URL`` environment
    variable.

    NOTE(review): no upload to Zenodo is visible here — this currently only
    enumerates and logs the content; confirm whether the actual transfer is
    implemented elsewhere.

    Returns:
        list of folders and file names backed up
    """
    client = Client()
    # "" lists the root of the client's configured bucket/folder.
    content_to_backup = list(client.ls(""))
    destination = os.environ["ZENODO_URL"]  # KeyError if unset — fail loudly
    # FIX: this is a routine status message, not a failure; was logger.error.
    logger.info("Backup to '{}': {}", destination, content_to_backup)
    return content_to_backup


if __name__ == "__main__":
    typer.run(backup)
13 changes: 13 additions & 0 deletions scripts/publish.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import typer
from utils.remote_resource import PublishedVersion, StagedVersion
from utils.s3_client import Client


def publish(resource_id: str, stage_nr: int):
    """Promote staged version ``stage_nr`` of resource ``resource_id``.

    Loads the staged version from S3 and publishes it; fails with an
    AssertionError if publishing did not yield a ``PublishedVersion``.
    """
    s3_client = Client()
    staged_version = StagedVersion(client=s3_client, id=resource_id, version=stage_nr)
    outcome = staged_version.publish()
    assert isinstance(outcome, PublishedVersion)


if __name__ == "__main__":
    typer.run(publish)
4 changes: 2 additions & 2 deletions scripts/test_dynamically.py → scripts/run_dynamic_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def get_summary_detail_from_exception(name: str, exception: Exception):
)


def test_dynamically(
def run_dynamic_tests(
resource_id: str,
version: int,
weight_format: Optional[WeightsFormat] = typer.Argument(
Expand Down Expand Up @@ -127,4 +127,4 @@ def test_dynamically(


if __name__ == "__main__":
typer.run(test_dynamically)
typer.run(run_dynamic_tests)
4 changes: 4 additions & 0 deletions tests/test_scripts/test_utils/test_remote_resource.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
def test_lifecycle(
client: "Client", package_url: str, package_id: str, s3_test_folder_url: str
):
from scripts.backup import backup
from scripts.utils.remote_resource import (
PublishedVersion,
RemoteResource,
Expand All @@ -27,3 +28,6 @@ def test_lifecycle(
assert (
published_rdf_url == f"{s3_test_folder_url}frank-water-buffalo/1/files/rdf.yaml"
)

backed_up = backup()
assert backed_up == ["frank-water-buffalo"]
Loading