# Source: PowerDNS/pdns — merge of pull request #14690 (Habbie/backport-14686-to-auth-4.7.x), CI run #9968
---
name: 'Build and test everything'

# NOTE: `on` is a YAML 1.1 boolean token; GitHub's loader treats it as the
# trigger key, so leave it unquoted (suppress yamllint `truthy` here if linting).
on:
  push:
  pull_request:
  workflow_call:
    inputs:
      branch-name:
        description: 'Checkout to a specific branch'
        required: true
        default: ''
        type: string
  schedule:
    - cron: '0 22 * * 3'

permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
  contents: read
jobs:
  build-auth:
    name: build auth
    runs-on: ubuntu-20.04
    env:
      UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ github.workspace }}/build-scripts/UBSan.supp"
      ASAN_OPTIONS: detect_leaks=0
    steps:
      - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
      - uses: actions/checkout@v3
        with:
          fetch-depth: 5
          submodules: recursive
          ref: ${{ inputs.branch-name }}
      - name: get timestamp for cache
        id: get-stamp
        run: |
          echo "stamp=$(/bin/date +%s)" >> "$GITHUB_OUTPUT"
        shell: bash
      - name: let GitHub cache our ccache data
        uses: actions/cache@v3
        with:
          path: ~/.ccache
          # unique key per run (timestamp) plus a prefix restore-key so the
          # most recent cache is always reused and then re-saved
          key: auth-ccache-${{ steps.get-stamp.outputs.stamp }}
          restore-keys: auth-ccache-
      - run: build-scripts/gh-actions-setup-inv  # this runs apt update+upgrade
      - run: inv install-clang
      - run: inv install-auth-build-deps
      - run: inv ci-autoconf
      - run: inv ci-auth-configure
      - run: inv ci-auth-make
      - run: inv ci-auth-install-remotebackend-test-deps
      - run: inv ci-auth-run-unit-tests
      - run: inv ci-make-install
      - run: ccache -s
      # normalize the branch name so it is usable inside an artifact name
      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
      - name: Store the binaries
        uses: actions/upload-artifact@v3  # this takes 30 seconds, maybe we want to tar
        with:
          name: pdns-auth-${{ env.normalized-branch-name }}
          path: /opt/pdns-auth
          retention-days: 1
test-auth-api:
needs: build-auth
runs-on: ubuntu-20.04
env:
UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ github.workspace }}/build-scripts/UBSan.supp"
ASAN_OPTIONS: detect_leaks=0
TSAN_OPTIONS: "halt_on_error=1:suppressions=${{ github.workspace }}/pdns/dnsdistdist/dnsdist-tsan.supp"
strategy:
matrix:
include:
- backend: gsqlite3
image: coscale/docker-sleep
- backend: gmysql
image: mysql:5
- backend: gpgsql
image: postgres:9
- backend: lmdb
image: coscale/docker-sleep
fail-fast: false
services:
database:
image: ${{ matrix.image }}
env:
POSTGRES_USER: runner
POSTGRES_HOST_AUTH_METHOD: trust
MYSQL_ALLOW_EMPTY_PASSWORD: 1
ports:
- 3306:3306
- 5432:5432
# FIXME: this works around dist-upgrade stopping all docker containers. dist-upgrade is huge on these images anyway. Perhaps we do want to run our tasks in a Docker container too.
options: >-
--restart always
steps:
- uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- uses: actions/checkout@v3
with:
fetch-depth: 5
submodules: recursive
ref: ${{ inputs.branch-name }}
- run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v3
with:
name: pdns-auth-${{ env.normalized-branch-name }}
path: /opt/pdns-auth
# - name: Setup upterm session
# uses: lhotari/action-upterm@v1
- run: build-scripts/gh-actions-setup-inv # this runs apt update+upgrade
- run: inv install-clang-runtime
- run: inv install-auth-test-deps -b ${{ matrix.backend }}
- run: inv test-api auth -b ${{ matrix.backend }}
test-auth-backend:
needs: build-auth
runs-on: ubuntu-20.04
env:
UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ github.workspace }}/build-scripts/UBSan.supp"
ASAN_OPTIONS: detect_leaks=0
LDAPHOST: ldap://ldapserver/
strategy:
matrix:
include:
- backend: remote
image: coscale/docker-sleep
env: {}
ports: []
- backend: gmysql
image: mysql:5
env:
MYSQL_ALLOW_EMPTY_PASSWORD: 1
ports:
- 3306:3306
- backend: gmysql
image: mariadb:10
env:
MYSQL_ALLOW_EMPTY_PASSWORD: 1
ports:
- 3306:3306
- backend: gpgsql
image: postgres:9
env:
POSTGRES_USER: runner
POSTGRES_HOST_AUTH_METHOD: trust
ports:
- 5432:5432
- backend: gsqlite3 # this also runs regression-tests.nobackend and pdnsutil test-algorithms
image: coscale/docker-sleep
env: {}
ports: []
- backend: lmdb
image: coscale/docker-sleep
env: {}
ports: []
- backend: bind
image: coscale/docker-sleep
env: {}
ports: []
- backend: geoip
image: coscale/docker-sleep
env: {}
ports: []
- backend: lua2
image: coscale/docker-sleep
env: {}
ports: []
- backend: tinydns
image: coscale/docker-sleep
env: {}
ports: []
- backend: authpy
image: coscale/docker-sleep
env: {}
ports: []
- backend: godbc_sqlite3
image: coscale/docker-sleep
env: {}
ports: []
- backend: godbc_mssql
image: mcr.microsoft.com/mssql/server:2022-CU12-ubuntu-22.04
env:
ACCEPT_EULA: Y
SA_PASSWORD: 'SAsa12%%'
ports:
- 1433:1433
- backend: ldap
image: powerdns/ldap-regress:1.2.4-1
env:
LDAP_LOG_LEVEL: 0
CONTAINER_LOG_LEVEL: 4
ports:
- 389:389
- backend: geoip_mmdb
image: coscale/docker-sleep
env: {}
ports: []
fail-fast: false
services:
database:
image: ${{ matrix.image }}
env: ${{ matrix.env }}
ports: ${{ matrix.ports }}
# FIXME: this works around dist-upgrade stopping all docker containers. dist-upgrade is huge on these images anyway. Perhaps we do want to run our tasks in a Docker container too.
options: >-
--restart always
steps:
- uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- uses: actions/checkout@v3
with:
fetch-depth: 5
submodules: recursive
ref: ${{ inputs.branch-name }}
- run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v3
with:
name: pdns-auth-${{ env.normalized-branch-name }}
path: /opt/pdns-auth
# - name: Setup upterm session
# uses: lhotari/action-upterm@v1
# FIXME: install recursor for backends that have ALIAS
- run: build-scripts/gh-actions-setup-inv # this runs apt update+upgrade
- run: inv install-clang-runtime
- run: inv install-auth-test-deps -b ${{ matrix.backend }}
- run: inv test-auth-backend -b ${{ matrix.backend }}
test-ixfrdist:
needs: build-auth
runs-on: ubuntu-20.04
env:
UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ github.workspace }}/build-scripts/UBSan.supp"
ASAN_OPTIONS: detect_leaks=0
steps:
- uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- uses: actions/checkout@v3
with:
fetch-depth: 5
submodules: recursive
ref: ${{ inputs.branch-name }}
- run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v3
with:
name: pdns-auth-${{ env.normalized-branch-name }}
path: /opt/pdns-auth
- run: build-scripts/gh-actions-setup-inv # this runs apt update+upgrade
- run: inv install-clang-runtime
- run: inv install-auth-test-deps
- run: inv test-ixfrdist
swagger-syntax-check:
runs-on: ubuntu-20.04
steps:
- uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- uses: actions/checkout@v3
with:
fetch-depth: 5
submodules: recursive
ref: ${{ inputs.branch-name }}
- run: build-scripts/gh-actions-setup-inv # this runs apt update+upgrade
- run: inv install-swagger-tools
- run: inv swagger-syntax-check
collect:
needs:
- build-auth
- swagger-syntax-check
- test-auth-api
- test-auth-backend
- test-ixfrdist
if: success() || failure()
runs-on: ubuntu-22.04
steps:
- name: Install jq and jc
run: "sudo apt-get update && sudo apt-get install jq jc"
- name: Fail job if any of the previous jobs failed
run: "for i in `echo '${{ toJSON(needs) }}' | jq -r '.[].result'`; do if [[ $i == 'failure' ]]; then echo '${{ toJSON(needs) }}'; exit 1; fi; done;"
- uses: actions/checkout@v3
with:
fetch-depth: 5
submodules: recursive
ref: ${{ inputs.branch-name }}
- name: Get list of jobs in the workflow
run: "cat .github/workflows/build-and-test-all.yml | jc --yaml | jq -rS '.[].jobs | keys | .[]' | grep -v collect | tee /tmp/workflow-jobs-list.yml"
- name: Get list of prerequisite jobs
run: "echo '${{ toJSON(needs) }}' | jq -rS 'keys | .[]' | tee /tmp/workflow-needs-list.yml"
- name: Fail if there is a job missing on the needs list
run: "if ! diff -q /tmp/workflow-jobs-list.yml /tmp/workflow-needs-list.yml; then exit 1; fi"
# FIXME: if we can make upload/download-artifact fasts, running unit tests outside of build can let regression tests start earlier