@@ -25,7 +25,7 @@ volumes:
2525 driver_opts :
2626 << : *driver_default
2727 device : ${DATA_PORTAINER:-/opt/data/docker/portainer2.9}
28- vol_postgres :
28+ vol_postgres14 :
2929 << : *volume_default
3030 driver_opts :
3131 << : *driver_default
@@ -50,22 +50,23 @@ volumes:
5050# AIRFLOW GLOBAL
5151# ================================================================================================
5252x-airflow-common : &airflow-common
53- image : ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.2.2}
53+ image : ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.2.4}
5454 environment : &airflow-common-env
5555 AIRFLOW__CORE__EXECUTOR : CeleryExecutor
56- AIRFLOW__CORE__SQL_ALCHEMY_CONN : postgresql+psycopg2://postgres:passw0rdDB@postgresql/airflow
57- AIRFLOW__CELERY__RESULT_BACKEND : db+postgresql://postgres:passw0rdDB@postgresql/airflow
56+ AIRFLOW__CORE__SQL_ALCHEMY_CONN : postgresql+psycopg2://airflow:airflow@postgresql/airflow
57+ AIRFLOW__CELERY__RESULT_BACKEND : db+postgresql://airflow:airflow@postgresql/airflow
5858 AIRFLOW__CELERY__BROKER_URL : redis://:@redis:6379/0
5959 AIRFLOW__CORE__FERNET_KEY : ""
6060 AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION : "true"
6161 AIRFLOW__CORE__LOAD_EXAMPLES : "true"
6262 AIRFLOW__API__AUTH_BACKEND : "airflow.api.auth.backend.basic_auth"
63+ _PIP_ADDITIONAL_REQUIREMENTS : ${_PIP_ADDITIONAL_REQUIREMENTS:-}
6364 volumes :
6465 - vol_airflow_dags:/opt/airflow/dags
6566 - vol_airflow_logs:/opt/airflow/logs
6667 - vol_airflow_plugins:/opt/airflow/plugins
67- user : " ${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000} "
68- depends_on :
68+ user : " ${AIRFLOW_UID:-50000}:0 "
69+ depends_on : &airflow-common-depends-on
6970 redis :
7071 condition : service_healthy
7172 postgres :
@@ -126,14 +127,19 @@ services:
126127 volumes :
127128 # - /etc/localtime:/etc/localtime:ro ## Do not use it in mac
128129 # - /var/run/docker.sock:/var/run/docker.sock ## Do not use it in k8s
129- - vol_postgres:/var/lib/postgresql
130+ - vol_postgres14:/var/lib/postgresql
130131 command : postgres -c shared_preload_libraries=pg_stat_statements -c pg_stat_statements.track=all -c max_connections=200
131132 environment :
132133 - TZ="Asia/Jakarta"
133134 - POSTGRES_VERSION=${POSTGRES_VERSION:-14.1}
134- - POSTGRES_USER=${POSTGRES_USER:-postgres}
135- - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-passw0rdDB}
135+ - POSTGRES_USER=${POSTGRES_USER:-airflow}
136+ - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-airflow}
137+ - POSTGRES_DB=${POSTGRES_DB:-airflow}
136138 # - PGDATA=${PGDATA:-/var/lib/postgresql/pgdata}
139+ healthcheck :
140+ test : ["CMD", "pg_isready", "-U", "airflow"]
141+ interval : 5s
142+ retries : 5
137143 privileged : true
138144 tty : true
139145 networks :
@@ -155,6 +161,10 @@ services:
155161 interval : 10s
156162 timeout : 10s
157163 retries : 5
164+ depends_on :
165+ << : *airflow-common-depends-on
166+ airflow-init :
167+ condition : service_completed_successfully
158168 privileged : true
159169 tty : true
160170 networks :
@@ -175,6 +185,10 @@ services:
175185 interval : 10s
176186 timeout : 10s
177187 retries : 5
188+ depends_on :
189+ << : *airflow-common-depends-on
190+ airflow-init :
191+ condition : service_completed_successfully
178192
179193 airflow-worker :
180194 << : *airflow-common
@@ -188,17 +202,121 @@ services:
188202 interval : 10s
189203 timeout : 10s
190204 retries : 5
205+ depends_on :
206+ << : *airflow-common-depends-on
207+ airflow-init :
208+ condition : service_completed_successfully
209+
210+ airflow-triggerer :
211+ << : *airflow-common
212+ container_name : ${CONTAINER_AIRFLOW_TRIGGER:-devopscorner_airflow_trigger}
213+ restart : always
214+ command : triggerer
215+ healthcheck :
216+ test :
217+ [
218+ " CMD-SHELL" ,
219+ ' airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"' ,
220+ ]
221+ interval : 10s
222+ timeout : 10s
223+ retries : 5
224+ depends_on :
225+ << : *airflow-common-depends-on
226+ airflow-init :
227+ condition : service_completed_successfully
191228
192229 airflow-init :
193230 << : *airflow-common
194231 container_name : ${CONTAINER_AIRFLOW_INIT:-devopscorner_airflow_init}
232+ entrypoint : /bin/bash
233+ # yamllint disable rule:line-length
234+ command :
235+ - -c
236+ - |
237+ function ver() {
238+ printf "%04d%04d%04d%04d" $${1//./ }
239+ }
240+ airflow_version=$$(gosu airflow airflow version)
241+ airflow_version_comparable=$$(ver $${airflow_version})
242+ min_airflow_version=2.2.0
243+ min_airflow_version_comparable=$$(ver $${min_airflow_version})
244+ if (( airflow_version_comparable < min_airflow_version_comparable )); then
245+ echo
246+ echo -e "\033[1;31mERROR!!!: Too old Airflow version $${airflow_version}!\e[0m"
247+ echo "The minimum Airflow version supported: $${min_airflow_version}. Only use this or higher!"
248+ echo
249+ exit 1
250+ fi
251+ if [[ -z "${AIRFLOW_UID}" ]]; then
252+ echo
253+ echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
254+ echo "If you are on Linux, you SHOULD follow the instructions below to set "
255+ echo "AIRFLOW_UID environment variable, otherwise files will be owned by root."
256+ echo "For other operating systems you can get rid of the warning with manually created .env file:"
257+ echo " See: https://airflow.apache.org/docs/apache-airflow/stable/start/docker.html#setting-the-right-airflow-user"
258+ echo
259+ fi
260+ one_meg=1048576
261+ mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
262+ cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat)
263+ disk_available=$$(df / | tail -1 | awk '{print $$4}')
264+ warning_resources="false"
265+ if (( mem_available < 4000 )) ; then
266+ echo
267+ echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m"
268+ echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))"
269+ echo
270+ warning_resources="true"
271+ fi
272+ if (( cpus_available < 2 )); then
273+ echo
274+ echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m"
275+ echo "At least 2 CPUs recommended. You have $${cpus_available}"
276+ echo
277+ warning_resources="true"
278+ fi
279+ if (( disk_available < one_meg * 10 )); then
280+ echo
281+ echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m"
282+ echo "At least 10 GBs recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))"
283+ echo
284+ warning_resources="true"
285+ fi
286+ if [[ $${warning_resources} == "true" ]]; then
287+ echo
288+ echo -e "\033[1;33mWARNING!!!: You have not enough resources to run Airflow (see above)!\e[0m"
289+ echo "Please follow the instructions to increase amount of resources available:"
290+ echo " https://airflow.apache.org/docs/apache-airflow/stable/start/docker.html#before-you-begin"
291+ echo
292+ fi
293+ mkdir -p /sources/logs /sources/dags /sources/plugins
294+ chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins}
295+ exec /entrypoint airflow version
296+ # yamllint enable rule:line-length
195297 environment :
196298 << : *airflow-common-env
197299 _AIRFLOW_DB_UPGRADE : "true"
198300 _AIRFLOW_WWW_USER_CREATE : "true"
199- _AIRFLOW_WWW_USER_USERNAME : ${_AIRFLOW_WWW_USER_USERNAME:-postgres}
200- _AIRFLOW_WWW_USER_PASSWORD : ${_AIRFLOW_WWW_USER_PASSWORD:-passw0rdDB}
201- command : version
301+ _AIRFLOW_WWW_USER_USERNAME : ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
302+ _AIRFLOW_WWW_USER_PASSWORD : ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
303+ user : " 0:0"
304+ volumes :
305+ - .:/sources
306+
307+ airflow-cli :
308+ << : *airflow-common
309+ container_name : ${CONTAINER_AIRFLOW_CLI:-devopscorner_airflow_cli}
310+ profiles :
311+ - debug
312+ environment :
313+ << : *airflow-common-env
314+ CONNECTION_CHECK_MAX_COUNT : "0"
315+ # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
316+ command :
317+ - bash
318+ - -c
319+ - airflow
202320
203321 flower :
204322 << : *airflow-common
@@ -212,3 +330,7 @@ services:
212330 interval : 10s
213331 timeout : 10s
214332 retries : 5
333+ depends_on :
334+ << : *airflow-common-depends-on
335+ airflow-init :
336+ condition : service_completed_successfully
0 commit comments