name: Provider Map Jobs

on:
  workflow_dispatch:
  schedule:
    - cron: "0 0 * * 0"
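    # "0 0 * * 0" fires at 00:00 UTC every Sunday, matching the PR body below.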

jobs:
  fetch_warehouse:
    name: Fetch Warehouse Updates
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Authenticate with Google
        uses: google-github-actions/auth@v2
        with:
          credentials_json: '${{ secrets.GCP_SA_KEY }}'
      - name: Set up Cloud SDK
        uses: google-github-actions/setup-gcloud@v2
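      # The two steps above authenticate the runner with the GCP_SA_KEY service
      # account and install the Google Cloud SDK, which bundles the `bq` CLI
      # used in the next step.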
      - name: Download providers.csv from warehouse
        run: |
          # Log which version of the BigQuery CLI we're using
          bq version
          # An idiotic workaround: run a throwaway `bq show` first so that `bq`
          # does not pollute our CSV file with its first-run welcome message,
          # which is NOT suppressed by `--quiet`, `--headless`, or any other
          # flag that implies "silence unnecessary output."
          #
          # https://stackoverflow.com/q/73177304
          bq show
          # Download the latest providers.csv from the warehouse
          bash .github/resources/download_csv.sh
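      # Note: download_csv.sh is assumed to write the exported CSV to
      # src/metadata/providers/providers.csv, the path committed by the
      # Create Pull Request step below.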
      - uses: actions/setup-python@v5
        with:
          python-version: '3.10'
      - name: Install dependencies, process providers, and update GeoJSON
        run: |
          pip install -r .github/resources/requirements.txt
          python .github/resources/process_providers.py
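      # process_providers.py is assumed to read the downloaded providers.csv and
      # regenerate counties.geojson; both files are committed by the step below.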
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v6
        with:
          title: Provider Map Data Auto Update
          body: |
            It's that time again! The warehouse has delivered new data for us to use. This is an automatic pull request created by the `provider-map-jobs.yml` workflow; it is triggered via a cron that runs every Sunday at midnight UTC.
          commit-message: "chore: auto-update provider data from warehouse"
          add-paths: |
            src/metadata/providers/providers.csv
            src/metadata/providers/counties.geojson
          base: main