-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathdocker-compose.localdb.yaml
104 lines (96 loc) · 2.94 KB
/
docker-compose.localdb.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
# docker compose spec
# https://github.com/compose-spec/compose-spec/blob/master/spec.md
# https://github.com/compose-spec/compose-spec/blob/master/spec.md#extension
# https://github.com/compose-spec/compose-spec/blob/master/spec.md#fragments
# airflow configs
# https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html#webserver
# ====================================== AIRFLOW ENVIRONMENT VARIABLES =======================================
---
version: '3.7'

# Shared base configuration for all Airflow services below; pulled in via the
# YAML anchor/alias mechanism (<<: *airflow-common).
x-airflow-common:
  &airflow-common
  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.3.3-python3.9}
  environment:
    &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: LocalExecutor
    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CORE__FERNET_KEY: ''
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
    # Quoted so the value stays a string, consistent with the env vars above.
    AIRFLOW__SCHEDULER__MIN_FILE_PROCESS_INTERVAL: '10'
  volumes:
    - ./dags:/opt/airflow/dags
    - ./logs:/opt/airflow/logs
    - ./plugins:/opt/airflow/plugins
    - ./data:/data
    - ./mlflow:/mlflow
  user: "${AIRFLOW_UID:-50000}:0"
  depends_on:
    postgres:
      condition: service_healthy
# ====================================== /AIRFLOW ENVIRONMENT VARIABLES ======================================

services:
  # Metadata database for Airflow (LocalExecutor stores all state here).
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    ports:
      # Quoted per Compose convention to avoid YAML sexagesimal parsing of
      # colon-separated numbers.
      - "5432:5432"
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always

  # One-shot init container: prepares bind-mount directories, upgrades the
  # metadata DB, and creates the admin user, then exits.
  airflow-init:
    <<: *airflow-common
    build:
      context: ./docker/airflow-base
    image: instork/airflow-init
    entrypoint: /bin/bash
    command:
      - -c
      - |
        mkdir -p /sources/logs /sources/dags /sources/plugins /sources/data /sources/mlflow
        chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins,data,mlflow}
        exec /entrypoint airflow version
    env_file:
      - airflow.env
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_UPGRADE: 'true'
      _AIRFLOW_WWW_USER_CREATE: 'true'
      # set below on airflow.env
      # _AIRFLOW_WWW_USER_USERNAME: your_username
      # _AIRFLOW_WWW_USER_PASSWORD: your_pwd
    user: "0:0"
    volumes:
      - .:/sources

  airflow-webserver:
    <<: *airflow-common
    build:
      context: ./docker/airflow-base
    image: instork/airflow-webserver
    command: webserver
    ports:
      - "8080:8080"
    healthcheck:
      # FIX: probe port 8080 — the Airflow webserver default, matching the
      # published port above. The previous target (localhost:8085) pointed at
      # a port nothing listens on, so the healthcheck failed permanently and
      # the container never became healthy.
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-scheduler:
    <<: *airflow-common
    build:
      context: ./docker/airflow-scheduler-local
    image: instork/airflow-scheduler
    command: scheduler
    restart: always
    extra_hosts:
      # Map the mongoservice hostname to the default docker0 bridge gateway so
      # DAGs can reach a MongoDB instance running on the host.
      - 'mongoservice:172.17.0.1'

volumes:
  postgres-db-volume: