Skip to content

Commit

Permalink
Merge pull request #16 from bioimage-io/separate_test
Browse files Browse the repository at this point in the history
Separate test from stage
  • Loading branch information
FynnBe authored Mar 20, 2024
2 parents 99961b7 + 095474d commit 8c9bc21
Show file tree
Hide file tree
Showing 17 changed files with 381 additions and 228 deletions.
115 changes: 115 additions & 0 deletions .github/workflows/build_backoffice.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
# CI workflow for the backoffice: runs the staging → publish → collection-json
# → backup pipeline against the S3/Zenodo *test* environments, then builds and
# (on main only) deploys the pdoc documentation to GitHub Pages.
name: test backoffice and build docs

on: push

# Serialize runs: all jobs mutate shared state in the test bucket.
concurrency: test

env:
  S3_HOST: ${{vars.S3_HOST}}
  S3_BUCKET: ${{vars.S3_TEST_BUCKET}} # testing!
  S3_FOLDER: ${{vars.S3_TEST_FOLDER}} # testing!
  S3_TEST_BUCKET: ${{vars.S3_TEST_BUCKET}}
  S3_TEST_FOLDER: ${{vars.S3_TEST_FOLDER}}
  S3_PYTEST_FOLDER: ${{vars.S3_PYTEST_FOLDER}}
  ZENODO_URL: ${{vars.ZENODO_TEST_URL}} # testing!
  ZENODO_TEST_URL: ${{vars.ZENODO_TEST_URL}}

  S3_ACCESS_KEY_ID: ${{secrets.S3_ACCESS_KEY_ID}}
  S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}}
  ZENODO_API_ACCESS_TOKEN: ${{secrets.ZENODO_TEST_API_ACCESS_TOKEN}} # testing!
  ZENODO_TEST_API_ACCESS_TOKEN: ${{secrets.ZENODO_TEST_API_ACCESS_TOKEN}}

  TEST_PACKAGE_ID: ${{vars.TEST_PACKAGE_ID}}
  TEST_PACKAGE_URL: ${{vars.TEST_PACKAGE_URL}}

jobs:
  # Wipe the test bucket prefix so every pipeline run starts from a clean slate.
  initial-cleanup:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
          cache: "pip" # caching pip dependencies
      - run: pip install .
      - run: backoffice wipe --bucket "${{vars.S3_TEST_BUCKET}}" --prefix "${{vars.S3_TEST_FOLDER}}"

  # Exercise the staging workflow against the test package.
  test-stage-wf:
    needs: initial-cleanup
    uses: ./.github/workflows/stage_call.yaml
    with:
      resource_id: ${{vars.TEST_PACKAGE_ID}} # testing!
      package_url: ${{vars.TEST_PACKAGE_URL}} # testing!
      S3_HOST: ${{vars.S3_HOST}}
      S3_BUCKET: ${{vars.S3_TEST_BUCKET}} # testing!
      S3_FOLDER: ${{vars.S3_TEST_FOLDER}}/ci # testing!
    secrets: inherit

  # Publish the staged test package to the Zenodo sandbox.
  test-publish-wf:
    needs: test-stage-wf
    uses: ./.github/workflows/publish_call.yaml
    with:
      resource_id: ${{vars.TEST_PACKAGE_ID}} # testing!
      stage_number: 1
      S3_HOST: ${{vars.S3_HOST}}
      S3_BUCKET: ${{vars.S3_TEST_BUCKET}} # testing!
      S3_FOLDER: ${{vars.S3_TEST_FOLDER}}/ci # testing!
      ZENODO_URL: ${{vars.ZENODO_TEST_URL}} # testing!
    secrets: inherit

  # Regenerate the collection JSON from the published test resources.
  test-generate-collection-json-wf:
    needs: test-publish-wf
    uses: ./.github/workflows/generate_collection_json_call.yaml
    with:
      S3_HOST: ${{vars.S3_HOST}}
      S3_BUCKET: ${{vars.S3_TEST_BUCKET}} # testing!
      S3_FOLDER: ${{vars.S3_TEST_FOLDER}}/ci # testing!
    secrets: inherit

  # Back up the test collection to the Zenodo sandbox.
  test-backup-wf:
    needs: test-generate-collection-json-wf
    uses: ./.github/workflows/backup_call.yaml
    with:
      S3_HOST: ${{vars.S3_HOST}}
      S3_BUCKET: ${{vars.S3_TEST_BUCKET}} # testing!
      S3_FOLDER: ${{vars.S3_TEST_FOLDER}}/ci # testing!
      ZENODO_URL: ${{vars.ZENODO_TEST_URL}} # testing!
    secrets: inherit

  # Lint, type-check, run the unit tests, and build the API docs.
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
          cache: "pip" # caching pip dependencies
      - run: pip install .[dev]
      - run: black . --check
      - run: pyright -p pyproject.toml
      - run: pytest
      - name: export documentation
        run: pdoc backoffice -o ./docs
      - uses: actions/upload-pages-artifact@v3
        if: ${{ github.ref == 'refs/heads/main' }}
        with:
          path: docs/

  deploy_docs:
    needs: build
    if: ${{ github.ref == 'refs/heads/main' }}
    # Grant GITHUB_TOKEN the permissions required to make a Pages deployment
    permissions:
      pages: write # to deploy to Pages
      id-token: write # to verify the deployment originates from an appropriate source

    # Deploy to the github-pages environment
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}

    runs-on: ubuntu-latest
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
20 changes: 15 additions & 5 deletions .github/workflows/email_call.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,19 @@
# Emails the resource uploader about the status of test results, alerting them
# about whether they need to take any action to fix issues with their upload
# Sends an email from the bioimageiobot

name: email call
name: email

on:
workflow_dispatch: # dispatch only for testing
inputs:
user_email:
description: "Email address of user"
required: true
type: string
message:
description: "email body"
required: false
type: string

workflow_call:
inputs:
user_email:
Expand Down Expand Up @@ -37,15 +47,15 @@ jobs:
# Optional whether this connection use TLS (default is true if server_port is 465)
secure: true
# Optional (recommended) mail server username:
username: ${{secrets.MAIL_USERNAME}}
username: [email protected]
# Optional (recommended) mail server password:
password: ${{secrets.MAIL_PASSWORD}}
# Required mail subject:
subject: Github Actions job result
# Required recipients' addresses:
to: ${{ inputs.user_email }}
# Required sender full name (address can be skipped):
from: ${{ env.MAIL_FROM }}
from: [email protected]
# Optional plain body:
body: ${{ inputs.message }}
# Optional HTML body read from file:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/generate_collection_json_call.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -32,4 +32,4 @@ jobs:
python-version: "3.12"
cache: "pip" # caching pip dependencies
- run: pip install .
- run: backoffice genreate-collection-json
- run: backoffice generate-collection-json
2 changes: 1 addition & 1 deletion .github/workflows/publish_call.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ jobs:
python-version: "3.12"
cache: "pip" # caching pip dependencies
- run: pip install .
- run: backoffice publish "${{ inputs.resource_id }}" "${{ inputs.stage_number }}"
- run: backoffice publish "${{ inputs.resource_id }}" "staged/${{ inputs.stage_number }}"
# - name: Publish to Zenodo
# run: |
# python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Publishing to Zenodo" "5"
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/request_changes_call.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -49,4 +49,4 @@ jobs:
python-version: "3.12"
cache: "pip" # caching pip dependencies
- run: pip install .
- run: backoffice request-changes "${{ inputs.resource_id }}" "${{ inputs.stage_number }}" "${{ inputs.reason }}"
- run: backoffice request-changes "${{ inputs.resource_id }}" "staged/${{ inputs.stage_number }}" "${{ inputs.reason }}"
12 changes: 11 additions & 1 deletion .github/workflows/stage.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ on:
concurrency: ${{inputs.resource_id}}

jobs:
call:
stage:
uses: ./.github/workflows/stage_call.yaml
with:
resource_id: ${{inputs.resource_id}}
Expand All @@ -24,3 +24,13 @@ jobs:
S3_BUCKET: ${{vars.S3_BUCKET}}
S3_FOLDER: ${{vars.S3_FOLDER}}
secrets: inherit

test:
uses: ./.github/workflows/test_call.yaml
with:
resource_id: ${{inputs.resource_id}}
version: ${{inputs.package_url}}
S3_HOST: ${{vars.S3_HOST}}
S3_BUCKET: ${{vars.S3_BUCKET}}
S3_FOLDER: ${{vars.S3_FOLDER}}
secrets: inherit
60 changes: 5 additions & 55 deletions .github/workflows/stage_call.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@ on:
S3_FOLDER:
required: true
type: string
outputs:
firstword:
description: "The first output string"
value: ${{ jobs.example_job.outputs.output1 }}

concurrency: ${{inputs.resource_id}}-call

Expand All @@ -31,7 +35,7 @@ env:
S3_SECRET_ACCESS_KEY: ${{secrets.S3_SECRET_ACCESS_KEY}}

jobs:
stage:
run:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.stage.outputs.version }}
Expand All @@ -48,57 +52,3 @@ jobs:
- id: stage
run: backoffice stage "${{ inputs.resource_id }}" "${{ inputs.package_url }}"

test:
needs: stage
if: needs.stage.outputs.has_dynamic_test_cases == 'yes'
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.stage.outputs.dynamic_test_cases) }} # include: [{weight_format: ...}, ...]
max-parallel: 1 # avoid parallel updates to log.json
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.12"
cache: "pip" # caching pip dependencies
- run: pip install ruyaml
- name: save conda_env_${{ matrix.weight_format }}.yaml
run: |
import json
from pathlib import Path
from ruyaml import YAML
yaml = YAML(typ="safe")
conda_env = json.loads('${{ needs.stage.outputs.conda_envs }}')["${{ matrix.weight_format }}"]
yaml.dump(conda_env, Path("conda_env_${{ matrix.weight_format }}.yaml"))
shell: python
- name: install validation dependencies
id: create_env
uses: mamba-org/setup-micromamba@v1
with:
cache-downloads: true
environment-name: ${{ matrix.weight_format }}
environment-file: conda_env_${{ matrix.weight_format }}.yaml
continue-on-error: true # we inspect this step's outcome in run_dynamic_tests.py
timeout-minutes: 60
- run: pip install .
shell: bash -l {0}
- name: dynamic validation
shell: bash -l {0}
run: backoffice test "${{inputs.resource_id}}" "${{needs.stage.outputs.version}}" "${{ matrix.weight_format }}" "${{ steps.create_env.outcome }}"
timeout-minutes: 60

conclude:
needs: [stage, test]
if: always() # run even if test job fails
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.12"
cache: "pip" # caching pip dependencies
- run: pip install .
- run: backoffice await-review "${{ inputs.resource_id }}" "${{needs.stage.outputs.version}}"

# TODO: call emailer
Loading

0 comments on commit 8c9bc21

Please sign in to comment.