.github/workflows/ci-teradata-docker-release.yml
@@ -0,0 +1,59 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
---
name: airflow-teradata-docker-release
on:
  push:
    branches:
      - teradata_release_*
  workflow_dispatch:
permissions:
  contents: write
  pages: write
  id-token: write
jobs:
  teradata-docker-release:
    name: "Build the Airflow with Teradata Provider Docker image and publish it to the Teradata Docker repository"
    runs-on: "ubuntu-22.04"
    steps:
      - name: "Cleanup repo"
        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
      - name: "Checkout Teradata Airflow Source Code"
        uses: actions/checkout@v4
        with:
          ref: ${{ vars.BRANCH }}
          persist-credentials: false
      - name: "Install Python"
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"
          cache: 'pip'
      - name: "Branch Name"
        run: echo "Running on branch ${{ vars.BRANCH }}"
      - name: "Checkout docker build support files"
        if: ${{ vars.BRANCH != 'teradata_main' }}
        run: |
          git fetch
          git checkout origin/teradata_main -- .github/workflows/scripts/docker/docker-compose.yaml
          git checkout origin/teradata_main -- .github/workflows/scripts/docker/Dockerfile
          cp .github/workflows/scripts/docker/docker-compose.yaml .
          cp .github/workflows/scripts/docker/Dockerfile .
      - name: "Build Airflow with Teradata Provider docker image"
        run: docker-compose build
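The workflow runs on a push to any teradata_release_* branch or on a manual workflow_dispatch. As a sketch of both triggers (the branch name below is illustrative, and the manual trigger assumes the GitHub CLI is installed and authenticated):

  git checkout -b teradata_release_2.0.0
  git push origin teradata_release_2.0.0

  # or dispatch it directly:
  gh workflow run ci-teradata-docker-release.yml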
.github/workflows/scripts/docker/Dockerfile
@@ -0,0 +1,3 @@
FROM apache/airflow:2.8.3
USER airflow
RUN pip install --no-cache-dir "apache-airflow==${AIRFLOW_VERSION}" apache-airflow-providers-teradata
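This follows the official "extending the image" pattern from the Airflow docker-stack docs: the apache/airflow base image exports AIRFLOW_VERSION, so re-pinning apache-airflow to that version keeps pip from up- or downgrading Airflow while it installs the Teradata provider. A minimal local smoke test might look like this (the image tag is illustrative):

  docker build -t airflow-teradata:local .
  docker run --rm airflow-teradata:local pip show apache-airflow-providers-teradata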
.github/workflows/scripts/docker/docker-compose.yaml
@@ -0,0 +1,285 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
#
# WARNING: This configuration is for local development. Do not use it in a production deployment.
#
# This configuration supports basic customization through environment variables or an .env file.
# The following variables are supported:
#
# AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow.
#                      Default: apache/airflow:2.8.2
# AIRFLOW_UID - User ID in Airflow containers.
#               Default: 50000
# AIRFLOW_PROJ_DIR - Base path under which all the files will be mounted as volumes.
#                    Default: .
#
# Those settings are mostly useful for standalone testing or running Airflow in try-out mode.
#
# _AIRFLOW_WWW_USER_USERNAME - Username for the administrator account (if requested).
#                              Default: airflow
# _AIRFLOW_WWW_USER_PASSWORD - Password for the administrator account (if requested).
#                              Default: airflow
# _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers.
#                                Use this option ONLY for quick checks. Installing requirements at
#                                container startup is done EVERY TIME the service is started.
#                                A better way is to build a custom image or extend the official image
#                                as described in https://airflow.apache.org/docs/docker-stack/build.html.
#                                Default: ''
#
# Feel free to modify this file to suit your needs.
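#
# As an illustration, a minimal .env placed next to this file might contain
# (values are examples only; on Linux, `id -u` yields the right AIRFLOW_UID):
#
#   AIRFLOW_UID=50000
#   _AIRFLOW_WWW_USER_USERNAME=airflow
#   _AIRFLOW_WWW_USER_PASSWORD=airflow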
---
x-airflow-common:
  &airflow-common
  # In order to add custom dependencies or upgrade provider packages you can use your extended image.
  # Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml
  # and uncomment the "build" line below, then run `docker-compose build` to build the images.
  # image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.8.2}
  # image: ${AIRFLOW_IMAGE_NAME:-airflow-teradata:2.0.0}
  build: .
  environment:
    &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY: ''
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
    AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
    # yamllint disable rule:line-length
    # Use simple http server on scheduler for health checks
    # See https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/check-health.html#scheduler-health-check-server
    # yamllint enable rule:line-length
    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
    AIRFLOW__CORE__TEST_CONNECTION: 'Enabled'
    # WARNING: Use the _PIP_ADDITIONAL_REQUIREMENTS option ONLY for quick checks.
    # For any other purpose (development, test and especially production usage) build/extend the Airflow image.
    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
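    # For instance, a one-off experiment could go through .env rather than a rebuild
    # (the pin below is purely illustrative):
    #   _PIP_ADDITIONAL_REQUIREMENTS="lxml==4.9.4"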
  volumes:
    - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
    - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs
    - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config
    - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins
  user: "${AIRFLOW_UID:-50000}:0"
  depends_on:
    &airflow-common-depends-on
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy
services:
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 10s
      retries: 5
      start_period: 5s
    restart: always
  redis:
    image: redis:latest
    expose:
      - 6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 30s
      retries: 50
      start_period: 30s
    restart: always
  airflow-webserver:
    <<: *airflow-common
    command: webserver
    ports:
      - "8080:8080"
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
  airflow-scheduler:
    <<: *airflow-common
    command: scheduler
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
  airflow-worker:
    <<: *airflow-common
    command: celery worker
    healthcheck:
      # yamllint disable rule:line-length
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    environment:
      <<: *airflow-common-env
      # Required to handle warm shutdown of the celery workers properly
      # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
      DUMB_INIT_SETSID: "0"
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
  airflow-triggerer:
    <<: *airflow-common
    command: triggerer
    healthcheck:
      test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"']
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
  airflow-init:
    <<: *airflow-common
    entrypoint: /bin/bash
    # yamllint disable rule:line-length
    command:
      - -c
      - |
        if [[ -z "${AIRFLOW_UID}" ]]; then
          echo
          echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
          echo "If you are on Linux, you SHOULD follow the instructions below to set the"
          echo "AIRFLOW_UID environment variable, otherwise the files will be owned by root."
          echo "For other operating systems you can get rid of the warning with a manually created .env file:"
          echo "  See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user"
          echo
        fi
        one_meg=1048576
        mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
        cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat)
        disk_available=$$(df / | tail -1 | awk '{print $$4}')
        warning_resources="false"
        if (( mem_available < 4000 )) ; then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m"
          echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))"
          echo
          warning_resources="true"
        fi
        if (( cpus_available < 2 )); then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough CPUs available for Docker.\e[0m"
          echo "At least 2 CPUs recommended. You have $${cpus_available}"
          echo
          warning_resources="true"
        fi
        if (( disk_available < one_meg * 10 )); then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough disk space available for Docker.\e[0m"
          echo "At least 10 GBs recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))"
          echo
          warning_resources="true"
        fi
        if [[ $${warning_resources} == "true" ]]; then
          echo
          echo -e "\033[1;33mWARNING!!!: You do not have enough resources to run Airflow (see above)!\e[0m"
          echo "Please follow the instructions to increase the amount of resources available:"
          echo "  https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#before-you-begin"
          echo
        fi
        mkdir -p /sources/logs /sources/dags /sources/plugins
        chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins}
        exec /entrypoint airflow version
    # yamllint enable rule:line-length
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_MIGRATE: 'true'
      _AIRFLOW_WWW_USER_CREATE: 'true'
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
      _PIP_ADDITIONAL_REQUIREMENTS: ''
    user: "0:0"
    volumes:
      - ${AIRFLOW_PROJ_DIR:-.}:/sources
  airflow-cli:
    <<: *airflow-common
    profiles:
      - debug
    environment:
      <<: *airflow-common-env
      CONNECTION_CHECK_MAX_COUNT: "0"
    # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
    command:
      - bash
      - -c
      - airflow
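    # Targeting a service explicitly activates its profile, so a one-off CLI session
    # can be opened with e.g.: docker-compose run --rm airflow-cli airflow info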
  # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up
  # or by explicitly targeting it on the command line e.g. docker-compose up flower.
  # See: https://docs.docker.com/compose/profiles/
  flower:
    <<: *airflow-common
    command: celery flower
    profiles:
      - flower
    ports:
      - "5555:5555"
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
volumes:
  postgres-db-volume:
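Taken together, the compose file gives the usual local bring-up sequence. A minimal sketch (the admin login defaults to airflow/airflow unless overridden through the _AIRFLOW_WWW_USER_* variables above):

  docker-compose build
  docker-compose up airflow-init
  docker-compose up -d
  # web UI: http://localhost:8080
  # optional: docker-compose --profile flower up -d   (Flower at http://localhost:5555)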