Update the exposure func to handle filtering modelled datetimes and combining filters to customise exposure analysis #255

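For context, this PR lets callers filter the modelled tide datetimes used in the exposure calculation and combine multiple filters into a single customised analysis. A minimal usage sketch, assuming hypothetical argument names (the real signature lives in `intertidal/exposure.py`):

    # Hypothetical sketch only: the argument names below are illustrative
    # assumptions, not the confirmed API.
    from intertidal.exposure import exposure

    exposure_da, modelled_tides = exposure(
        dem=elevation,                              # intertidal elevation data
        time_range=("2022-01-01", "2022-12-31"),    # filter modelled datetimes
        filters=["Daylight", "Summer"],             # filters applied individually
        filters_combined=[("Daylight", "Summer")],  # filters combined into one analysis
    )

The CI workflow below (`.github/workflows/dea-intertidal-image.yml`) builds the image and runs the integration tests that exercise this behaviour.
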
name: Image build and test

env:
  IMAGE_NAME: geoscienceaustralia/dea-intertidal

on:
  push:
    branches:
      - main
      - tests
    paths:
      - 'intertidal/**'
      - 'data/**'
      - 'tests/**'
      - '.github/workflows/dea-intertidal-image.yml'
      - 'Dockerfile'
      - 'requirements.in'
      - 'setup.py'
      - 'codecov.yaml'
  pull_request:
    branches:
      - main
    paths:
      - 'intertidal/**'
      - 'data/**'
      - 'tests/**'
      - '.github/workflows/dea-intertidal-image.yml'
      - 'Dockerfile'
      - 'requirements.in'
      - 'setup.py'
      - 'codecov.yaml'
  release:
    types: [edited, published]

permissions:
  id-token: write # Required for requesting the JSON Web Token (JWT)
  contents: write # Required for actions/checkout and committing validation results
  pull-requests: write # Required for the validation results comment bot

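# Two jobs: 'test' builds the image and runs the integration test suite against
# the DEA sandbox datacube; 'push_ecr' publishes the image to ECR on pushes to
# main and on releases.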
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::060378307146:role/github-actions-dea-notebooks--sandbox
          aws-region: ap-southeast-2
          role-duration-seconds: 7200 # 2 hours

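      # Fetch read-only datacube credentials from AWS SSM Parameter Store and
      # expose them to later steps as a PostgreSQL connection URL that points at
      # the local port forward set up below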
      - name: Get database credentials
        run: |
          username_password=$(aws ssm get-parameter --name /dea-sandbox-eks/sandbox_reader/db.creds --with-decryption --query Parameter.Value --output text)
          echo DATACUBE_DB_URL=postgresql://${username_password}@localhost:5432/odc >> $GITHUB_ENV

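      # Basti tunnels to the private RDS instance through a bastion, exposing it
      # on localhost:5432 so the datacube database is reachable from the runner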
      - name: Open port forward to RDS
        run: |
          npx basti connect \
            --custom-target-vpc vpc-086904199e505c1f6 \
            --custom-target-host db-aurora-dea-sandbox-eks-1.cos5zfpkso9m.ap-southeast-2.rds.amazonaws.com \
            --custom-target-port 5432 \
            --local-port 5432 &

          # Wait until the connection comes up, but, if it doesn't, don't hang forever.
          npx wait-on --timeout 120000 --interval 1000 tcp:127.0.0.1:5432

          echo "PGPORT=5432" >> $GITHUB_ENV
          echo "PGHOST=localhost" >> $GITHUB_ENV

      - name: Build DEA Intertidal docker image
        timeout-minutes: 30
        shell: bash
        run: |
          docker build -t dea_intertidal .

      - name: Run integration tests
        run: |
          # Download tide modelling files and unzip
          # TODO: Replace with S3 sync from dea-non-public-data
          wget --no-verbose https://www.dropbox.com/s/uemd8ib2vfw5nad/tide_models.zip?dl=1 -O tide_models.zip
          unzip -q tide_models.zip

          # Run integration tests using Docker, setting up datacube access and AWS
          # configuration, and adding volumes that provide access to tide model data
          # and allow us to export artifacts from the run
          docker run \
            --net=host \
            --env DATACUBE_DB_URL \
            --env AWS_REGION \
            --env AWS_ACCESS_KEY_ID \
            --env AWS_SECRET_ACCESS_KEY \
            --env AWS_SESSION_TOKEN \
            --volume ${GITHUB_WORKSPACE}:/code \
            --volume ${GITHUB_WORKSPACE}/tide_models:/var/share/tide_models \
            --volume ${GITHUB_WORKSPACE}/artifacts:/mnt/artifacts \
            dea_intertidal pytest -v --cov=intertidal --cov-report=xml tests

          # Copy validation outputs produced by the integration tests into the
          # correct output locations so they can be committed back into the repository
          cp ./artifacts/validation.jpg ./tests/validation.jpg
          cp ./artifacts/validation.csv ./tests/validation.csv
          cp ./artifacts/README.md ./tests/README.md

      # Commit validation results produced by integration tests back into repo
      - name: Commit validation results into repository
        uses: stefanzweifel/git-auto-commit-action@v4
        if: github.event_name == 'pull_request'
        continue-on-error: true
        with:
          commit_message: Automatically update integration test validation results
          file_pattern: 'tests/validation.jpg tests/validation.csv tests/README.md'

      # Post validation results as comment on PR
      - name: Post validation results as comment
        uses: mshick/add-pr-comment@v2
        if: github.event_name == 'pull_request'
        with:
          message: |
            ![](https://github.com/GeoscienceAustralia/dea-intertidal/blob/${{ github.head_ref }}/tests/validation.jpg?raw=true)

            For full integration test results, refer to the [Tests directory README](https://github.com/GeoscienceAustralia/dea-intertidal/tree/${{ github.head_ref }}/tests).

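      # Upload the XML coverage report produced by the pytest run above to Codecov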
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v3
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

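  # Publish the tested image to ECR: releases are tagged with the release tag,
  # while pushes to main are tagged 'latest' plus a short commit-hash dev tag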
  push_ecr:
    needs: [test]
    runs-on: ubuntu-latest
    # Only run on a push to the main branch OR a release
    if: (github.event_name == 'push' && github.ref == 'refs/heads/main') || (github.event_name == 'release')
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          role-to-assume: arn:aws:iam::538673716275:role/github-actions-role
          aws-region: ap-southeast-2

      - name: Get tag for this build if it exists
        if: github.event_name == 'release'
        run: |
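          # For release events GITHUB_REF is 'refs/tags/<tag>'; strip the prefix
          # to recover the bare tag name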
echo "RELEASE=${GITHUB_REF/refs\/tags\/}" >> $GITHUB_ENV
      - name: Push release image to ECR
        uses: whoan/docker-build-with-cache-action@master
        if: github.event_name == 'release'
        with:
          registry: 538673716275.dkr.ecr.ap-southeast-2.amazonaws.com
          image_name: ${{ env.IMAGE_NAME }}
          image_tag: ${{ env.RELEASE }}

      - name: Get git commit hash for push to main
        if: github.event_name != 'release'
        run: |
          echo "TAG=dev$(git rev-parse --short HEAD)" >> $GITHUB_ENV

      - name: Push unstable image to ECR
        uses: whoan/docker-build-with-cache-action@master
        if: github.event_name != 'release'
        with:
          registry: 538673716275.dkr.ecr.ap-southeast-2.amazonaws.com
          image_name: ${{ env.IMAGE_NAME }}
          image_tag: latest,${{ env.TAG }}