# Databricks Apps configuration for Builder App
# Copy this file to app.yaml and customize for your deployment
#
# Prerequisites:
# 1. Create the app: databricks apps create <your-app-name>
# 2. Add Lakebase as a resource (see instructions below)
# 3. Configure your LLM provider settings
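#
# A typical end-to-end flow (a sketch only; verify the exact commands and flags
# against your installed Databricks CLI version and your workspace paths):
#   databricks apps create <your-app-name>
#   databricks sync . /Workspace/Users/<your-user>/<your-app-name>
#   databricks apps deploy <your-app-name> \
#     --source-code-path /Workspace/Users/<your-user>/<your-app-name>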
command:
- "uvicorn"
- "server.app:app"
- "--host"
- "0.0.0.0"
- "--port"
- "$DATABRICKS_APP_PORT"
env:
# =============================================================================
# Application Settings
# =============================================================================
- name: ENV
  value: "production"
- name: PROJECTS_BASE_DIR
  value: "./projects"
- name: PYTHONPATH
  value: "/app/python/source_code/packages"
# =============================================================================
# Skills Configuration
# =============================================================================
# Comma-separated list of skills to enable
- name: ENABLED_SKILLS
  value: "databricks-asset-bundles,databricks-agent-bricks,databricks-aibi-dashboards,databricks-app-apx,databricks-app-python,databricks-config,databricks-docs,databricks-jobs,databricks-python-sdk,databricks-unity-catalog,mlflow-evaluation,spark-declarative-pipelines,synthetic-data-generation,unstructured-pdf-generation"
- name: SKILLS_ONLY_MODE
  value: "false"
# =============================================================================
# Database Configuration (Lakebase)
# =============================================================================
# IMPORTANT: You must add Lakebase as an app resource for database connectivity.
#
# Steps:
# 1. Create a Lakebase instance in your workspace (if one doesn't already exist)
# 2. Add it as an app resource:
#      databricks apps add-resource <app-name> \
#        --resource-type database \
#        --resource-name lakebase \
#        --database-instance <your-lakebase-instance-name>
#
# When added as a resource, Databricks automatically sets:
# - PGHOST, PGPORT, PGUSER, PGPASSWORD, PGDATABASE
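#
# Quick connectivity check (optional, outside the app): with those PG* variables
# exported, running `psql` with no arguments should connect, since libpq reads
# PGHOST/PGPORT/PGUSER/PGPASSWORD/PGDATABASE from the environment.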
#
# You only need to specify the instance name for OAuth token generation:
- name: LAKEBASE_INSTANCE_NAME
  value: "fe-shared-demo"
- name: LAKEBASE_DATABASE_NAME
  value: "databricks_postgres"
- name: LAKEBASE_SCHEMA_NAME
  value: "builder_app"
# =============================================================================
# LLM Provider Configuration
# =============================================================================
# Option 1: Databricks Foundation Models (default)
- name: LLM_PROVIDER
  value: "DATABRICKS"
- name: DATABRICKS_MODEL
  value: "databricks-meta-llama-3-3-70b-instruct"
- name: DATABRICKS_MODEL_MINI
  value: "databricks-gemini-3-flash"
# Option 2: Anthropic Claude (uncomment and add your key)
# - name: ANTHROPIC_API_KEY
#   value: "<your-anthropic-api-key>"
# Option 3: Azure OpenAI (uncomment and configure)
# - name: LLM_PROVIDER
#   value: "AZURE"
# - name: AZURE_OPENAI_API_KEY
#   value: "<your-azure-api-key>"
# - name: AZURE_OPENAI_ENDPOINT
#   value: "https://<your-resource>.cognitiveservices.azure.com/"
# - name: AZURE_OPENAI_API_VERSION
#   value: "2024-08-01-preview"
# - name: AZURE_OPENAI_DEPLOYMENT
#   value: "gpt-4o"
# - name: AZURE_OPENAI_DEPLOYMENT_MINI
#   value: "gpt-4o-mini"
# =============================================================================
# Claude SDK Configuration (Databricks FMAPI)
# =============================================================================
# These configure the Claude Agent SDK to use Databricks model serving endpoints
# instead of hitting Anthropic directly. The app dynamically sets ANTHROPIC_BASE_URL
# and ANTHROPIC_AUTH_TOKEN from the user's Databricks credentials at runtime.
- name: ANTHROPIC_MODEL
  value: "databricks-claude-opus-4-6"
- name: ANTHROPIC_MODEL_MINI
  value: "databricks-claude-sonnet-4-6"
- name: CLAUDE_CODE_STREAM_CLOSE_TIMEOUT
  value: "3600000"
# =============================================================================
# MLflow Tracing Configuration
# =============================================================================
# Enable MLflow tracing for Claude Code conversations
# Traces are automatically sent to your Databricks workspace
# See: https://docs.databricks.com/aws/en/mlflow3/genai/tracing/integrations/claude-code
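# With tracing enabled, traces should appear under the configured MLflow
# experiment in the workspace (Traces tab), assuming the app's credentials can
# write to that experiment.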
- name: MLFLOW_CLAUDE_TRACING_ENABLED
  value: "true"
- name: MLFLOW_TRACKING_URI
  value: "databricks"
- name: MLFLOW_REGISTRY_URI
  value: "databricks-uc"
# Optional: Default MLflow experiment for traces (can be overridden per-session in the UI)
- name: MLFLOW_EXPERIMENT_NAME
  value: "" # Set to your MLflow experiment path, e.g. "/Users/your.email@databricks.com/claude-code-traces"
# =============================================================================
# Permission Configuration
# =============================================================================
# Grant access to resources created by the app to this principal (e.g., "account users" for everyone)
- name: AUTO_GRANT_PERMISSIONS_TO
  value: "account users"