# Apex Wealth - Airflow Docker Setup
# =====================================
# Starts 4 services:
#   airflow-db          Airflow's own internal database (metadata, logs)
#   airflow-init        Runs once to set up Airflow and create the admin user
#   airflow-webserver   The Airflow UI at http://localhost:8080
#   airflow-scheduler   Watches your DAGs folder and runs tasks on schedule
#
# --------------------------------
#
# Login: admin / admin
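#
# Quick start (assumes Docker Compose v2; older installs may need `docker-compose`):
#   docker compose up -d        # start everything in the background
#   docker compose ps           # check that all four services came up
#   docker compose down         # stop (add -v to also wipe Airflow's metadata DB)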

# Shared settings reused by webserver, scheduler, and init
x-airflow-common: &airflow-common
  image: apache/airflow:2.9.2  # classic UI
  env_file: .env
  environment:
    # Use LocalExecutor - simple, no extra worker containers needed
    AIRFLOW__CORE__EXECUTOR: LocalExecutor
    # Airflow's own metadata database (NOT your stock data)
    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@airflow-db/airflow
    # Hide the 50+ example DAGs so the UI is clean for students
    AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
    # Shared secret so webserver and scheduler can read each other's logs
    AIRFLOW__WEBSERVER__SECRET_KEY: 'apex_secret_key_2026'
    # Install the Python packages our pipeline needs at startup
    _PIP_ADDITIONAL_REQUIREMENTS: "requests pandas psycopg2-binary sqlalchemy python-dotenv"
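    # NOTE: _PIP_ADDITIONAL_REQUIREMENTS reinstalls these packages on every
    # container start - fine for a classroom setup, but bake them into a
    # custom image for anything long-lived.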
    # DEFAULT: points at the Postgres instance running on your local machine
    # BACKUP: change this to 'stock-db' if the local Postgres connection fails
    DB_HOST: host.docker.internal
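    # Fallback sketch - assumes your stock database runs as a container named
    # 'stock-db' on the same Compose network:
    # DB_HOST: stock-db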
  volumes:
    # Mount your local dags/ folder - edits appear instantly without restart
    - ./dags:/opt/airflow/dags
  depends_on:
    airflow-db:
      condition: service_healthy
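
# Note: YAML's `<<:` merge is shallow - a service that defines its own
# depends_on (like the webserver below) replaces the one above entirely
# rather than combining with it.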

services:
  # 1. Airflow's internal metadata database
  airflow-db:
    image: postgres:15
    container_name: airflow-db
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5

  # 2. One-time setup - creates tables and the admin user, then exits
  airflow-init:
    <<: *airflow-common
    container_name: airflow-init
    command: bash -c "airflow db migrate && airflow users create --username admin --password admin --firstname Admin --lastname User --role Admin --email admin@apex.com"
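    # Re-running init is safe: `airflow db migrate` only applies migrations
    # that are missing, and `airflow users create` just reports that the
    # user already exists on later runs.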

  # 3. Web UI - open http://localhost:8080 in your browser
  airflow-webserver:
    <<: *airflow-common
    container_name: airflow-webserver
    command: webserver
    ports:
      - "8080:8080"
    depends_on:
      airflow-init:
        condition: service_completed_successfully
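    # Optional healthcheck sketch (curl ships in the official airflow image):
    # healthcheck:
    #   test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
    #   interval: 30s
    #   retries: 5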

  # 4. Scheduler - finds and triggers your DAGs automatically
  airflow-scheduler:
    <<: *airflow-common
    container_name: airflow-scheduler
    command: scheduler
    depends_on:
      airflow-init:
        condition: service_completed_successfully
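    # Optional healthcheck sketch, mirroring the official Airflow compose file:
    # healthcheck:
    #   test: ["CMD-SHELL", "airflow jobs check --job-type SchedulerJob"]
    #   interval: 30s
    #   retries: 5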