Skip to content

Commit

Permalink
bump airflow version
Browse files Browse the repository at this point in the history
  • Loading branch information
antonysouthworth-halter committed Oct 3, 2024
1 parent 7edbe73 commit 17a4e3e
Show file tree
Hide file tree
Showing 5 changed files with 68 additions and 42 deletions.
2 changes: 2 additions & 0 deletions .tool-versions
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
golang 1.20
terraform 1.8.5
8 changes: 7 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
AIRFLOW_VERSION=2.10.2

TEST?=$$(go list ./...)
ACCTEST_PARALLELISM?=20

Expand All @@ -8,4 +10,8 @@ testacc:
TF_ACC=1 go test $(TEST) -parallel $(ACCTEST_PARALLELISM) -v $(TESTARGS) -timeout 5m

test:
go test
go test

.PHONY: docker-compose.yaml
docker-compose.yaml:
curl https://airflow.apache.org/docs/apache-airflow/2.10.2/docker-compose.yaml >$@
72 changes: 40 additions & 32 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
# The following variables are supported:
#
# AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow.
# Default: apache/airflow:2.5.1
# Default: apache/airflow:2.10.2
# AIRFLOW_UID - User ID in Airflow containers
# Default: 50000
# AIRFLOW_PROJ_DIR - Base path to which all the files will be volumed.
Expand All @@ -36,34 +36,46 @@
# _AIRFLOW_WWW_USER_PASSWORD - Password for the administrator account (if requested).
# Default: airflow
# _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers.
# Use this option ONLY for quick checks. Installing requirements at container
# startup is done EVERY TIME the service is started.
# A better way is to build a custom image or extend the official image
# as described in https://airflow.apache.org/docs/docker-stack/build.html.
# Default: ''
#
# Feel free to modify this file to suit your needs.
---
version: '3'
x-airflow-common:
&airflow-common
# In order to add custom dependencies or upgrade provider packages you can use your extended image.
# Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml
# and uncomment the "build" line below, then run `docker-compose build` to build the images.
image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.5.1}
image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.10.2-python3.12}
# build: .
environment:
&airflow-common-env
AIRFLOW__CORE__EXECUTOR: CeleryExecutor
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
# For backward compatibility, with Airflow <2.3
AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
AIRFLOW__CORE__FERNET_KEY: ''
AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
# yamllint disable rule:line-length
# Use simple http server on scheduler for health checks
# See https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/check-health.html#scheduler-health-check-server
# yamllint enable rule:line-length
AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
    # WARNING: Use the _PIP_ADDITIONAL_REQUIREMENTS option ONLY for quick checks;
    # for any other purpose (development, testing, and especially production use) build/extend the Airflow image.
_PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
# The following line can be used to set a custom config file, stored in the local config folder
    # If you want to use it, uncomment it and replace airflow.cfg with the name of your config file
# AIRFLOW_CONFIG: '/opt/airflow/config/airflow.cfg'
volumes:
- ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
- ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs
- ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config
- ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins
user: "${AIRFLOW_UID:-50000}:0"
depends_on:
Expand All @@ -84,31 +96,36 @@ services:
- postgres-db-volume:/var/lib/postgresql/data
healthcheck:
test: ["CMD", "pg_isready", "-U", "airflow"]
interval: 5s
interval: 10s
retries: 5
start_period: 5s
restart: always

redis:
image: redis:latest
    # Redis is limited to 7.2-bookworm due to a licensing change
# https://redis.io/blog/redis-adopts-dual-source-available-licensing/
image: redis:7.2-bookworm
expose:
- 6379
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 5s
interval: 10s
timeout: 30s
retries: 50
start_period: 30s
restart: always

airflow-webserver:
<<: *airflow-common
command: webserver
ports:
- 8080:8080
- "8080:8080"
healthcheck:
test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
interval: 10s
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
restart: always
depends_on:
<<: *airflow-common-depends-on
Expand All @@ -119,10 +136,11 @@ services:
<<: *airflow-common
command: scheduler
healthcheck:
test: ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"']
interval: 10s
test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
restart: always
depends_on:
<<: *airflow-common-depends-on
Expand All @@ -133,12 +151,14 @@ services:
<<: *airflow-common
command: celery worker
healthcheck:
# yamllint disable rule:line-length
test:
- "CMD-SHELL"
- 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
interval: 10s
- 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
environment:
<<: *airflow-common-env
# Required to handle warm shutdown of the celery workers properly
Expand All @@ -155,9 +175,10 @@ services:
command: triggerer
healthcheck:
test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"']
interval: 10s
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
restart: always
depends_on:
<<: *airflow-common-depends-on
Expand All @@ -171,20 +192,6 @@ services:
command:
- -c
- |
function ver() {
printf "%04d%04d%04d%04d" $${1//./ }
}
airflow_version=$$(AIRFLOW__LOGGING__LOGGING_LEVEL=INFO && gosu airflow airflow version)
airflow_version_comparable=$$(ver $${airflow_version})
min_airflow_version=2.2.0
min_airflow_version_comparable=$$(ver $${min_airflow_version})
if (( airflow_version_comparable < min_airflow_version_comparable )); then
echo
echo -e "\033[1;31mERROR!!!: Too old Airflow version $${airflow_version}!\e[0m"
echo "The minimum Airflow version supported: $${min_airflow_version}. Only use this or higher!"
echo
exit 1
fi
if [[ -z "${AIRFLOW_UID}" ]]; then
echo
echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
Expand Down Expand Up @@ -233,7 +240,7 @@ services:
# yamllint enable rule:line-length
environment:
<<: *airflow-common-env
_AIRFLOW_DB_UPGRADE: 'true'
_AIRFLOW_DB_MIGRATE: 'true'
_AIRFLOW_WWW_USER_CREATE: 'true'
_AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
_AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
Expand Down Expand Up @@ -264,12 +271,13 @@ services:
profiles:
- flower
ports:
- 5555:5555
- "5555:5555"
healthcheck:
test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
interval: 10s
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
restart: always
depends_on:
<<: *airflow-common-depends-on
Expand Down
10 changes: 9 additions & 1 deletion internal/provider/resource_variable.go
Original file line number Diff line number Diff line change
Expand Up @@ -96,11 +96,19 @@ func resourceVariableUpdate(ctx context.Context, d *schema.ResourceData, m inter

if v, ok := d.GetOk("description"); ok {
variableReq.SetDescription(v.(string))
} else {
variableReq.SetDescription("")
}

_, resp, err := client.VariableApi.PatchVariable(pcfg.AuthContext, key).Variable(variableReq).Execute()
if err != nil {
return diag.Errorf("failed to update variable `%s`, Status: `%s` from Airflow: %s", key, resp.Status, err)
responseBody := make([]byte, resp.ContentLength)
_, err2 := resp.Body.Read(responseBody)
if err2 != nil {
return diag.Errorf("error reading response from Airflow: %s", err2)
}

return diag.Errorf("failed to update variable `%s`, Status: `%s` from Airflow: %s\n%s", key, resp.Status, err, string(responseBody))
}

return resourceVariableRead(ctx, d, m)
Expand Down
18 changes: 10 additions & 8 deletions internal/provider/resource_variable_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ func TestAccAirflowVariable_basic(t *testing.T) {
func TestAccAirflowVariable_desc(t *testing.T) {
rName := acctest.RandomWithPrefix("tf-acc-test")
rNameUpdated := acctest.RandomWithPrefix("tf-acc-test")
rDesc := acctest.RandomWithPrefix("tf-acc-test")

resourceName := "airflow_variable.test"
resource.Test(t, resource.TestCase{
Expand All @@ -53,11 +54,11 @@ func TestAccAirflowVariable_desc(t *testing.T) {
CheckDestroy: testAccCheckAirflowVariableCheckDestroy,
Steps: []resource.TestStep{
{
Config: testAccAirflowVariableConfigDesc(rName, rName),
Config: testAccAirflowVariableConfigDesc(rName, rName, rDesc),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "key", rName),
resource.TestCheckResourceAttr(resourceName, "value", rName),
resource.TestCheckResourceAttr(resourceName, "description", rName),
resource.TestCheckResourceAttr(resourceName, "description", rDesc),
),
},
{
Expand All @@ -66,19 +67,20 @@ func TestAccAirflowVariable_desc(t *testing.T) {
ImportStateVerify: true,
},
{
Config: testAccAirflowVariableConfigDesc(rName, rNameUpdated),
Config: testAccAirflowVariableConfigDesc(rName, rNameUpdated, rDesc),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "key", rName),
resource.TestCheckResourceAttr(resourceName, "value", rNameUpdated),
resource.TestCheckResourceAttr(resourceName, "description", rNameUpdated),
resource.TestCheckResourceAttr(resourceName, "description", rDesc),
),
},
{
Config: testAccAirflowVariableConfigBasic(rName, rName),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "key", rName),
resource.TestCheckResourceAttr(resourceName, "value", rName),
resource.TestCheckNoResourceAttr(resourceName, "description"),
// NOTE(review): the Airflow API appears to reject setting description to null,
// so removing the attribute leaves an empty string rather than unsetting it — confirm upstream.
resource.TestCheckResourceAttr(resourceName, "description", ""),
),
},
},
Expand Down Expand Up @@ -117,12 +119,12 @@ resource "airflow_variable" "test" {
`, rName, value)
}

func testAccAirflowVariableConfigDesc(rName, value string) string {
func testAccAirflowVariableConfigDesc(rName, value, desc string) string {
return fmt.Sprintf(`
resource "airflow_variable" "test" {
key = %[1]q
value = %[2]q
description = "test"
description = %[3]q
}
`, rName, value)
`, rName, value, desc)
}

0 comments on commit 17a4e3e

Please sign in to comment.