# .env.example — template for Sim environment variables (54 lines, 45 loc).
# Copy this file to .env and replace the placeholder values before running.
# Database (Required)
DATABASE_URL="postgresql://postgres:password@localhost:5432/postgres"
# PostgreSQL Port (Optional) - defaults to 5432 if not specified
# POSTGRES_PORT=5432
# Authentication (Required unless DISABLE_AUTH=true)
BETTER_AUTH_SECRET=your_secret_key # Use `openssl rand -hex 32` to generate, or visit https://www.better-auth.com/docs/installation
BETTER_AUTH_URL=http://localhost:3000
# Authentication Bypass (Optional - for self-hosted deployments behind private networks)
# DISABLE_AUTH=true # Uncomment to bypass authentication entirely. Creates an anonymous session for all requests.
# NextJS (Required)
NEXT_PUBLIC_APP_URL=http://localhost:3000
# INTERNAL_API_BASE_URL=http://sim-app.default.svc.cluster.local:3000 # Optional: internal URL for server-side /api self-calls; defaults to NEXT_PUBLIC_APP_URL
# Security (Required)
ENCRYPTION_KEY=your_encryption_key # Use `openssl rand -hex 32` to generate, used to encrypt environment variables
INTERNAL_API_SECRET=your_internal_api_secret # Use `openssl rand -hex 32` to generate, used to secure internal API routes
API_ENCRYPTION_KEY=your_api_encryption_key # Use `openssl rand -hex 32` to generate, used to encrypt API keys
# Email Provider (Optional)
# RESEND_API_KEY= # Uncomment and add your key from https://resend.com to send actual emails
# If left commented out, emails will be logged to console instead
# Local AI Models (Optional)
# OLLAMA_URL=http://localhost:11434 # URL for local Ollama server - uncomment if using local models
# VLLM_BASE_URL=http://localhost:8000 # Base URL for your self-hosted vLLM (OpenAI-compatible)
# VLLM_API_KEY= # Optional bearer token if your vLLM instance requires auth
# Internal OpenCode Service (Optional, opt-in)
# NEXT_PUBLIC_OPENCODE_ENABLED=true # Required to show the OpenCode block in the UI
# # Leave unset to keep the block hidden and preserve the default Sim UX
# OPENCODE_BASE_URL=http://127.0.0.1:4096 # Use this when Sim runs on the host (for example, `bun run dev`) and OpenCode runs in Docker
# OPENCODE_BASE_URL=http://opencode:4096 # Use this when SIM and OpenCode both run in Docker Compose
# # Or point this to any separate OpenCode deployment that implements the same auth contract
# OPENCODE_PORT=4096
# OPENCODE_REPOSITORY_ROOT=/app/repos # Must match the repository root used by the OpenCode runtime, including external deployments
# OPENCODE_SERVER_USERNAME=opencode
# OPENCODE_SERVER_PASSWORD=change-me # Required for the internal OpenCode service
# OPENCODE_REPOS=https://github.com/org/ui-components,https://github.com/org/design-tokens
# OPENCODE_REPOS=https://dev.azure.com/org/project/_git/repo # Azure Repos over HTTPS also works
# GIT_USERNAME= # Optional HTTPS git username for private repos, including Azure Repos
# GIT_TOKEN= # Optional HTTPS git token/PAT for private repos, including Azure Repos
# GITHUB_TOKEN= # Optional GitHub token fallback for private GitHub repos
# OPENAI_API_KEY= # OpenCode can use any supported provider key from the environment
# ANTHROPIC_API_KEY= # Optional if you prefer Anthropic for OpenCode
# GEMINI_API_KEY= # Optional if you prefer Gemini for OpenCode
# GOOGLE_GENERATIVE_AI_API_KEY= # Optional explicit alias for OpenCode's Google provider; defaults from GEMINI_API_KEY in the optional compose overlays
# Admin API (Optional - for self-hosted GitOps)
# ADMIN_API_KEY= # Use `openssl rand -hex 32` to generate. Enables admin API for workflow export/import.
# Usage: curl -H "x-admin-key: your_key" https://your-instance/api/v1/admin/workspaces