-
Notifications
You must be signed in to change notification settings - Fork 262
Expand file tree
/
Copy path.env.example
More file actions
109 lines (92 loc) · 5.51 KB
/
.env.example
File metadata and controls
109 lines (92 loc) · 5.51 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# Databricks Builder App - Environment Configuration
# ==================================================
#
# For LOCAL DEVELOPMENT: The setup script copies this file to .env.local automatically.
# If setting up manually, copy this file to .env.local and fill in your values.
# For DEPLOYMENT: Copy app.yaml.example to app.yaml and configure there.
#
# The app loads .env.local for local development. When deploying to Databricks Apps,
# use app.yaml for environment configuration instead.
# =============================================================================
# Databricks Configuration (Local Development)
# =============================================================================
# Workspace URL and personal access token
# In production (Databricks Apps), authentication is handled automatically via
# the service principal's OAuth credentials - no token needed.
DATABRICKS_HOST=https://your-workspace.cloud.databricks.com
DATABRICKS_TOKEN=dapi...
# =============================================================================
# Database Configuration (Lakebase)
# =============================================================================
# Choose ONE of the following options. The app auto-detects the mode based on
# which variable is set (LAKEBASE_ENDPOINT takes priority over LAKEBASE_INSTANCE_NAME).
# Option 1a: Autoscale Lakebase — recommended (scales to zero when idle)
# Find endpoint name in: Catalog → Lakebase → your project → Branches → Endpoints
# Format: projects/<project-name>/branches/<branch>/endpoints/<endpoint>
LAKEBASE_ENDPOINT=projects/your-project/branches/production/endpoints/primary
LAKEBASE_DATABASE_NAME=databricks_postgres
# Option 1b: Provisioned Lakebase — fixed capacity
# Find instance name in: Catalog → Lakebase → your instance
# LAKEBASE_INSTANCE_NAME=your-lakebase-instance
# LAKEBASE_DATABASE_NAME=databricks_postgres
# Option 2: Static connection URL — simplest for local dev (no auto token refresh)
# Format: postgresql://<email-urlencoded>:<oauth-token>@<host>:5432/<database>?sslmode=require
# Example (autoscale host):
# LAKEBASE_PG_URL=postgresql://your.email%40company.com:dapi...@ep-dry-sound-d2p70ooh.database.us-east-1.cloud.databricks.com:5432/databricks_postgres?sslmode=require
# Example (provisioned host):
# LAKEBASE_PG_URL=postgresql://your.email%40company.com:dapi...@my-instance.database.cloud.databricks.com:5432/databricks_postgres?sslmode=require
# Note: @ in email must be URL-encoded as %40. Token expires in ~1 hour.
# =============================================================================
# LLM Provider Configuration
# =============================================================================
# Choose your LLM provider: DATABRICKS (default) or AZURE
LLM_PROVIDER=DATABRICKS
# Databricks Foundation Models (adjust to models available in your workspace)
# Examples: databricks-meta-llama-3-3-70b-instruct, databricks-claude-sonnet-4
DATABRICKS_MODEL=databricks-meta-llama-3-3-70b-instruct
DATABRICKS_MODEL_MINI=databricks-gemini-3-flash
# Azure OpenAI (uncomment if using Azure instead)
# LLM_PROVIDER=AZURE
# AZURE_OPENAI_API_KEY=your-api-key
# AZURE_OPENAI_ENDPOINT=https://your-resource.cognitiveservices.azure.com/
# AZURE_OPENAI_API_VERSION=2024-08-01-preview
# AZURE_OPENAI_DEPLOYMENT=gpt-4o
# AZURE_OPENAI_DEPLOYMENT_MINI=gpt-4o-mini
# =============================================================================
# Skills Configuration
# =============================================================================
# Skills to include (comma-separated list of skill folder names)
ENABLED_SKILLS=databricks-agent-bricks,databricks-python-sdk,databricks-spark-declarative-pipelines,databricks-synthetic-data-gen,databricks-unstructured-pdf-generation
# Optional: add more skills by appending their folder names to the list, e.g.
# ENABLED_SKILLS=databricks-agent-bricks,databricks-python-sdk,databricks-spark-declarative-pipelines,databricks-synthetic-data-gen,databricks-unstructured-pdf-generation,your-custom-skill
# Test mode: only enable Skill tool (useful for debugging)
SKILLS_ONLY_MODE=false
# =============================================================================
# Application Settings
# =============================================================================
# Projects directory (where Claude Code agent will work)
PROJECTS_BASE_DIR=./projects
# Environment (development or production)
ENV=development
# Claude SDK stream timeout in milliseconds (default: 1 hour)
# Increase if you have very long-running operations
CLAUDE_CODE_STREAM_CLOSE_TIMEOUT=3600000
# Anthropic API key (optional - uses Databricks model serving by default)
# ANTHROPIC_API_KEY=sk-ant-...
# =============================================================================
# MLflow Tracing Configuration
# =============================================================================
# Enable MLflow tracing for Claude Code conversations
# See: https://docs.databricks.com/aws/en/mlflow3/genai/tracing/integrations/claude-code
MLFLOW_TRACKING_URI=databricks
# Optional: Specify a custom experiment for traces
# MLFLOW_EXPERIMENT_NAME=/Users/your-email@company.com/claude-code-traces
# =============================================================================
# DEPLOYMENT TO DATABRICKS APPS
# =============================================================================
#
# The deploy script handles everything automatically:
# ./scripts/deploy.sh <app-name> --profile <your-profile>
#
# This provisions Lakebase, creates the app, grants permissions, and deploys.
# See README.md for full details and options.