Skip to content

Commit 262f9b5

Browse files
committed
feat: add Databricks AI Gateway as LLM provider
Implements Databricks serving endpoints support with PAT auth, workspace URL resolution, and OpenAI-compatible request handling.

- Add databricks ProviderID to schema
- Create auth plugin with PAT parsing and host validation
- Add custom loader with env var fallback (DATABRICKS_HOST + DATABRICKS_TOKEN)
- Register 11 foundation models (Llama, Claude, GPT, Gemini, DBRX, Mixtral)
- Add 24 unit tests for host validation, PAT parsing, body transforms
- E2E tests included (skipped without credentials)

Closes #602

Co-Authored-By: Vijay Yadav <vjyadav194@gmail.com>
1 parent c103bfd commit 262f9b5

5 files changed

Lines changed: 453 additions & 2 deletions

File tree

Lines changed: 125 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,125 @@
1+
import type { Hooks, PluginInput } from "@opencode-ai/plugin"
2+
import { Auth, OAUTH_DUMMY_KEY } from "@/auth"
3+
4+
/**
5+
* Databricks workspace host regex.
6+
* Matches patterns like: myworkspace.cloud.databricks.com, adb-1234567890.12.azuredatabricks.net
7+
*/
8+
export const VALID_HOST_RE = /^[a-zA-Z0-9._-]+\.(cloud\.databricks\.com|azuredatabricks\.net|gcp\.databricks\.com)$/
9+
10+
/** Parse a `host::token` credential string for Databricks PAT auth. */
11+
export function parseDatabricksPAT(code: string): { host: string; token: string } | null {
12+
const sep = code.indexOf("::")
13+
if (sep === -1) return null
14+
const host = code.substring(0, sep).trim()
15+
const token = code.substring(sep + 2).trim()
16+
if (!host || !token) return null
17+
if (!VALID_HOST_RE.test(host)) return null
18+
return { host, token }
19+
}
20+
21+
/**
22+
* Transform a Databricks request body string.
23+
* Databricks Foundation Model APIs use max_tokens (OpenAI-compatible),
24+
* but some endpoints may prefer max_completion_tokens.
25+
*/
26+
export function transformDatabricksBody(bodyText: string): { body: string } {
27+
const parsed = JSON.parse(bodyText)
28+
29+
// Databricks uses max_tokens for most endpoints, but some newer ones
30+
// expect max_completion_tokens. Normalize to max_tokens for compatibility.
31+
if ("max_completion_tokens" in parsed && !("max_tokens" in parsed)) {
32+
parsed.max_tokens = parsed.max_completion_tokens
33+
delete parsed.max_completion_tokens
34+
}
35+
36+
return { body: JSON.stringify(parsed) }
37+
}
38+
39+
export async function DatabricksAuthPlugin(_input: PluginInput): Promise<Hooks> {
40+
return {
41+
auth: {
42+
provider: "databricks",
43+
async loader(getAuth, provider) {
44+
const auth = await getAuth()
45+
if (auth.type !== "oauth") return {}
46+
47+
for (const model of Object.values(provider.models)) {
48+
model.cost = { input: 0, output: 0, cache: { read: 0, write: 0 } }
49+
}
50+
51+
return {
52+
apiKey: OAUTH_DUMMY_KEY,
53+
async fetch(requestInput: RequestInfo | URL, init?: RequestInit) {
54+
const currentAuth = await getAuth()
55+
if (currentAuth.type !== "oauth") return fetch(requestInput, init)
56+
57+
const headers = new Headers()
58+
if (init?.headers) {
59+
if (init.headers instanceof Headers) {
60+
init.headers.forEach((value, key) => headers.set(key, value))
61+
} else if (Array.isArray(init.headers)) {
62+
for (const [key, value] of init.headers) {
63+
if (value !== undefined) headers.set(key, String(value))
64+
}
65+
} else {
66+
for (const [key, value] of Object.entries(init.headers)) {
67+
if (value !== undefined) headers.set(key, String(value))
68+
}
69+
}
70+
}
71+
72+
headers.set("authorization", `Bearer ${currentAuth.access}`)
73+
74+
let body = init?.body
75+
if (body) {
76+
try {
77+
let text: string
78+
if (typeof body === "string") {
79+
text = body
80+
} else if (body instanceof Uint8Array || body instanceof ArrayBuffer) {
81+
text = new TextDecoder().decode(body)
82+
} else {
83+
text = ""
84+
}
85+
if (text) {
86+
const result = transformDatabricksBody(text)
87+
body = result.body
88+
headers.delete("content-length")
89+
}
90+
} catch {
91+
// JSON parse error — pass original body through untransformed
92+
}
93+
}
94+
95+
return fetch(requestInput, { ...init, headers, body })
96+
},
97+
}
98+
},
99+
methods: [
100+
{
101+
label: "Databricks PAT",
102+
type: "oauth",
103+
authorize: async () => ({
104+
url: "https://accounts.cloud.databricks.com",
105+
instructions:
106+
"Enter your credentials as: <workspace-host>::<PAT-token>\n e.g. myworkspace.cloud.databricks.com::dapi1234567890abcdef\n Create a PAT in Databricks: Settings → Developer → Access Tokens → Generate New Token",
107+
method: "code" as const,
108+
callback: async (code: string) => {
109+
const parsed = parseDatabricksPAT(code)
110+
if (!parsed) return { type: "failed" as const }
111+
return {
112+
type: "success" as const,
113+
access: parsed.token,
114+
refresh: "",
115+
// Databricks PATs can be configured with custom TTLs; use 90-day default
116+
expires: Date.now() + 90 * 24 * 60 * 60 * 1000,
117+
accountId: parsed.host,
118+
}
119+
},
120+
}),
121+
},
122+
],
123+
},
124+
}
125+
}

packages/opencode/src/plugin/index.ts

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,9 @@ import { gitlabAuthPlugin as GitlabAuthPlugin } from "@gitlab/opencode-gitlab-au
1515
// altimate_change start — snowflake cortex plugin import
1616
import { SnowflakeCortexAuthPlugin } from "../altimate/plugin/snowflake"
1717
// altimate_change end
18+
// altimate_change start — databricks plugin import
19+
import { DatabricksAuthPlugin } from "../altimate/plugin/databricks"
20+
// altimate_change end
1821
// altimate_change start — altimate backend auth plugin
1922
import { AltimateAuthPlugin } from "../altimate/plugin/altimate"
2023
// altimate_change end
@@ -28,8 +31,8 @@ export namespace Plugin {
2831
// GitlabAuthPlugin uses a different version of @opencode-ai/plugin (from npm)
2932
// vs the workspace version, causing a type mismatch on internal HeyApiClient.
3033
// The types are structurally compatible at runtime.
31-
// altimate_change start — snowflake cortex and altimate backend internal plugins
32-
const INTERNAL_PLUGINS: PluginInstance[] = [CodexAuthPlugin, CopilotAuthPlugin, GitlabAuthPlugin as unknown as PluginInstance, SnowflakeCortexAuthPlugin, AltimateAuthPlugin]
34+
// altimate_change start — snowflake cortex, databricks, and altimate backend internal plugins
35+
const INTERNAL_PLUGINS: PluginInstance[] = [CodexAuthPlugin, CopilotAuthPlugin, GitlabAuthPlugin as unknown as PluginInstance, SnowflakeCortexAuthPlugin, DatabricksAuthPlugin, AltimateAuthPlugin]
3336
// altimate_change end
3437

3538
const state = Instance.state(async () => {

packages/opencode/src/provider/provider.ts

Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,9 @@ import { ModelID, ProviderID } from "./schema"
5050
// altimate_change start — snowflake cortex account validation
5151
import { VALID_ACCOUNT_RE } from "../altimate/plugin/snowflake"
5252
// altimate_change end
53+
// altimate_change start — databricks host validation
54+
import { VALID_HOST_RE } from "../altimate/plugin/databricks"
55+
// altimate_change end
5356

5457
const DEFAULT_CHUNK_TIMEOUT = 120_000
5558

@@ -733,6 +736,32 @@ export namespace Provider {
733736
}
734737
},
735738
// altimate_change end
739+
// altimate_change start — databricks provider loader
databricks: async () => {
  // Resolve the workspace host (from stored PAT auth or env) and point the
  // OpenAI-compatible client at the workspace's serving-endpoints API.
  const auth = await Auth.get("databricks")
  if (auth?.type !== "oauth") {
    // Fall back to env-based config (DATABRICKS_HOST + DATABRICKS_TOKEN)
    const host = Env.get("DATABRICKS_HOST")
    const token = Env.get("DATABRICKS_TOKEN")
    if (!host || !token) return { autoload: false }
    // Validate the env host exactly like the PAT path below does; a
    // malformed value (e.g. one including "https://") would otherwise
    // produce a broken baseURL.
    if (!VALID_HOST_RE.test(host)) return { autoload: false }
    return {
      autoload: true,
      options: {
        baseURL: `https://${host}/serving-endpoints`,
        apiKey: token,
      },
    }
  }
  // PAT auth stores the workspace host in accountId; env is a backstop.
  const host = auth.accountId ?? Env.get("DATABRICKS_HOST")
  if (!host || !VALID_HOST_RE.test(host)) return { autoload: false }
  return {
    autoload: true,
    options: {
      baseURL: `https://${host}/serving-endpoints`,
    },
  }
},
// altimate_change end
736765
}
737766

738767
export const Model = z
@@ -1019,6 +1048,70 @@ export namespace Provider {
10191048
}
10201049
// altimate_change end
10211050

1051+
// altimate_change start — databricks provider models
1052+
function makeDatabricksModel(
1053+
id: string,
1054+
name: string,
1055+
limits: { context: number; output: number },
1056+
caps?: { reasoning?: boolean; attachment?: boolean; toolcall?: boolean; image?: boolean },
1057+
): Model {
1058+
const m: Model = {
1059+
id: ModelID.make(id),
1060+
providerID: ProviderID.databricks,
1061+
api: {
1062+
id,
1063+
url: "",
1064+
npm: "@ai-sdk/openai-compatible",
1065+
},
1066+
name,
1067+
capabilities: {
1068+
temperature: true,
1069+
reasoning: caps?.reasoning ?? false,
1070+
attachment: caps?.attachment ?? false,
1071+
toolcall: caps?.toolcall ?? true,
1072+
input: { text: true, audio: false, image: caps?.image ?? false, video: false, pdf: false },
1073+
output: { text: true, audio: false, image: false, video: false, pdf: false },
1074+
interleaved: false,
1075+
},
1076+
cost: { input: 0, output: 0, cache: { read: 0, write: 0 } },
1077+
limit: { context: limits.context, output: limits.output },
1078+
status: "active" as const,
1079+
options: {},
1080+
headers: {},
1081+
release_date: "2024-01-01",
1082+
variants: {},
1083+
}
1084+
m.variants = mapValues(ProviderTransform.variants(m), (v) => v)
1085+
return m
1086+
}
1087+
1088+
database["databricks"] = {
  id: ProviderID.databricks,
  source: "custom",
  name: "Databricks",
  // Only the token is env-discoverable; the workspace host comes from
  // DATABRICKS_HOST or the stored PAT accountId (see the provider loader).
  env: ["DATABRICKS_TOKEN"],
  options: {},
  // Foundation models exposed through Databricks serving endpoints.
  // Keys are the serving-endpoint names the workspace routes on.
  models: {
    // Meta Llama models — tool calling supported
    "databricks-meta-llama-3-1-405b-instruct": makeDatabricksModel("databricks-meta-llama-3-1-405b-instruct", "Meta Llama 3.1 405B Instruct", { context: 128000, output: 4096 }),
    "databricks-meta-llama-3-1-70b-instruct": makeDatabricksModel("databricks-meta-llama-3-1-70b-instruct", "Meta Llama 3.1 70B Instruct", { context: 128000, output: 4096 }),
    "databricks-meta-llama-3-1-8b-instruct": makeDatabricksModel("databricks-meta-llama-3-1-8b-instruct", "Meta Llama 3.1 8B Instruct", { context: 128000, output: 4096 }),
    // Claude models via Databricks AI Gateway
    "databricks-claude-sonnet-4-6": makeDatabricksModel("databricks-claude-sonnet-4-6", "Claude Sonnet 4.6", { context: 200000, output: 64000 }),
    "databricks-claude-opus-4-6": makeDatabricksModel("databricks-claude-opus-4-6", "Claude Opus 4.6", { context: 200000, output: 32000 }),
    // GPT models via Databricks AI Gateway
    "databricks-gpt-5-4": makeDatabricksModel("databricks-gpt-5-4", "GPT-5-4", { context: 128000, output: 16384 }),
    "databricks-gpt-5-mini": makeDatabricksModel("databricks-gpt-5-mini", "GPT-5 Mini", { context: 128000, output: 16384 }),
    // Gemini models via Databricks AI Gateway
    "databricks-gemini-3-1-pro": makeDatabricksModel("databricks-gemini-3-1-pro", "Gemini 3.1 Pro", { context: 1000000, output: 8192 }),
    // DBRX — Databricks native model
    "databricks-dbrx-instruct": makeDatabricksModel("databricks-dbrx-instruct", "DBRX Instruct", { context: 32768, output: 4096 }),
    // Mixtral via Databricks — no tool-calling support on this endpoint
    "databricks-mixtral-8x7b-instruct": makeDatabricksModel("databricks-mixtral-8x7b-instruct", "Mixtral 8x7B Instruct", { context: 32768, output: 4096 }, { toolcall: false }),
  },
}
// altimate_change end
1114+
10221115
// altimate_change start — register altimate-backend as an OpenAI-compatible provider
10231116
if (!database["altimate-backend"]) {
10241117
const backendModels: Record<string, Model> = {

packages/opencode/src/provider/schema.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,9 @@ export const ProviderID = providerIdSchema.pipe(
2626
// altimate_change start — snowflake cortex provider ID
2727
snowflakeCortex: schema.makeUnsafe("snowflake-cortex"),
2828
// altimate_change end
29+
// altimate_change start — databricks provider ID
30+
databricks: schema.makeUnsafe("databricks"),
31+
// altimate_change end
2932
})),
3033
)
3134

0 commit comments

Comments
 (0)