-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose.yaml
More file actions
106 lines (102 loc) · 3.61 KB
/
docker-compose.yaml
File metadata and controls
106 lines (102 loc) · 3.61 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
services:
postgres:
image: postgres:17 # TODO: ensure compatibility with our GCP PostgreSQL
environment:
- POSTGRES_USER
- POSTGRES_PASSWORD
- POSTGRES_DB
- POSTGRES_HOST
- POSTGRES_PORT
volumes:
- pgdata:/var/lib/postgresql/data
ports:
- "5436:5432"
mlflow:
build:
dockerfile: Dockerfile.mlflow
environment:
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_HOST: ${POSTGRES_HOST}
POSTGRES_PORT: ${POSTGRES_PORT}
MLFLOW_TRACKING_URI: ${MLFLOW_TRACKING_URI}
MLFLOW_ARTIFACT_DESTINATION: ${MLFLOW_ARTIFACT_DESTINATION}
# if not provided via volume below, GS will not work as artifact store
GOOGLE_APPLICATION_CREDENTIALS: /creds/gcp-key.json
# if not provided via volume below, AWS S3 will not work as artifact store
AWS_SHARED_CREDENTIALS_FILE: /creds/aws-credentials
# https://mlflow.org/docs/latest/community/usage-tracking/
MLFLOW_DISABLE_TELEMETRY: "true"
DO_NOT_TRACK: "true"
depends_on:
- postgres
# grab backend from .env, pass artifact root, if provided, otherwise local storage of artifacts
command: >
mlflow server
--backend-store-uri postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
--artifacts-destination ${MLFLOW_ARTIFACT_DESTINATION}
--serve-artifacts
--host 0.0.0.0
--port 8080
--allowed-hosts 'mlflow:*,localhost:*,127.0.0.1:*'
ports:
- "8080:8080"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
interval: 10s
timeout: 5s
retries: 5
volumes:
# Volume only needed for local storage of artifacts
- ./mlruns:/mlruns
# Volume only needed for GS storage artifacts
- ${GOOGLE_CREDENTIALS_PATH:-/dev/null}:/creds/gcp-key.json:ro
# Volume only needed for AWS S3 storage artifacts
- ${AWS_CREDENTIALS_PATH:-/dev/null}:/creds/aws-credentials:ro
  # Notebook environment for running experiments against the mlflow server.
  jupyter:
    build:
      context: .
      dockerfile: Dockerfile.jupyter
    environment:
      # All values interpolated by compose from the host env / .env file.
      MLFLOW_TRACKING_URI: ${MLFLOW_TRACKING_URI}
      MLFLOW_TRACKING_USERNAME: ${MLFLOW_TRACKING_USERNAME}
      MLFLOW_TRACKING_PASSWORD: ${MLFLOW_TRACKING_PASSWORD}
      # Token used to authenticate to the notebook UI on port 8888.
      JUPYTER_TOKEN: ${JUPYTER_TOKEN}
      GIT_PYTHON_REFRESH: ${GIT_PYTHON_REFRESH}
      # Key for the (mock) vLLM service below — must match its VLLM_API_KEY.
      VLLM_API_KEY: ${VLLM_API_KEY}
      # Below env needed for dvc (via git) support (backed by GCP)
      # SSH_AUTH_SOCK: /ssh-agent
      # GOOGLE_APPLICATION_CREDENTIALS: /creds/gcp-key.json
    ports:
      - "8888:8888"
    volumes:
      - ./flightpaths:/app/flightpaths
      # Used for caching of SUT/annotator results
      - ./flightpaths/.cache:/app/flightpaths/.cache
      # Volume not needed if not using modelplane-flights for sharing notebooks
      # NOTE(review): assumes a ../modelplane-flights checkout exists next to
      # this repo — confirm before deploying elsewhere.
      - ../modelplane-flights:/app/flightpaths/flights
      # Volume not needed if using cloud storage for artifacts
      # (shares the local artifact store with the mlflow service).
      - ./mlruns:/mlruns
      # Below needed for dvc (via git) support (backed by GCP)
      # - ${SSH_AUTH_SOCK:-/dev/null}:/ssh-agent
      # - ${GOOGLE_CREDENTIALS_PATH:-/dev/null}:/creds/gcp-key.json:ro
# Runs a dummy docker container to mock a vLLM server
vllm:
build:
context: .
dockerfile: Dockerfile.mockvllm
environment:
VLLM_MODEL: mlc/not-real-model
VLLM_HOST: 0.0.0.0
VLLM_PORT: 8001
VLLM_API_KEY: ${VLLM_API_KEY}
ports:
- "8001:8001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8001/health"]
interval: 10s
timeout: 5s
retries: 10
# Named volumes managed by docker; pgdata holds the postgres data directory
# so database contents survive container recreation.
volumes:
  pgdata: