Skip to content

Commit 8da75e1

Browse files
kitlangton and leohenon
authored and committed
test(provider): migrate config-backed cases to Effect runner (anomalyco#26969)
1 parent 11a0a97 commit 8da75e1

1 file changed

Lines changed: 60 additions & 73 deletions

File tree

packages/opencode/test/provider/provider.test.ts

Lines changed: 60 additions & 73 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ import { Env } from "../../src/env"
1515
import { Effect } from "effect"
1616
import { AppRuntime } from "../../src/effect/app-runtime"
1717
import { makeRuntime } from "../../src/effect/run-service"
18+
import { testEffect } from "../lib/effect"
1819

1920
const env = makeRuntime(Env.Service, Env.defaultLayer)
2021
const set = (k: string, v: string) => env.runSync((svc) => svc.set(k, v))
@@ -70,6 +71,8 @@ function paid(providers: Awaited<ReturnType<typeof list>>) {
7071
return Object.values(item.models).filter((model) => model.cost.input > 0).length
7172
}
7273

74+
const it = testEffect(Provider.defaultLayer)
75+
7376
test("provider loaded from env variable", async () => {
7477
await using tmp = await tmpdir({
7578
init: async (dir) => {
@@ -515,85 +518,69 @@ test("defaultModel respects config model setting", async () => {
515518
})
516519
})
517520

518-
test("provider with baseURL from config", async () => {
519-
await using tmp = await tmpdir({
520-
init: async (dir) => {
521-
await Bun.write(
522-
path.join(dir, "opencode.json"),
523-
JSON.stringify({
524-
$schema: "https://opencode.ai/config.json",
525-
provider: {
526-
"custom-openai": {
527-
name: "Custom OpenAI",
528-
npm: "@ai-sdk/openai-compatible",
529-
env: [],
530-
models: {
531-
"gpt-4": {
532-
name: "GPT-4",
533-
tool_call: true,
534-
limit: { context: 128000, output: 4096 },
535-
},
536-
},
537-
options: {
538-
apiKey: "test-key",
539-
baseURL: "https://custom.openai.com/v1",
540-
},
521+
it.instance(
522+
"provider with baseURL from config",
523+
Effect.gen(function* () {
524+
const providers = yield* Provider.Service.use((provider) => provider.list())
525+
expect(providers[ProviderID.make("custom-openai")]).toBeDefined()
526+
expect(providers[ProviderID.make("custom-openai")].options.baseURL).toBe("https://custom.openai.com/v1")
527+
}),
528+
{
529+
config: {
530+
provider: {
531+
"custom-openai": {
532+
name: "Custom OpenAI",
533+
npm: "@ai-sdk/openai-compatible",
534+
env: [],
535+
models: {
536+
"gpt-4": {
537+
name: "GPT-4",
538+
tool_call: true,
539+
limit: { context: 128000, output: 4096 },
541540
},
542541
},
543-
}),
544-
)
545-
},
546-
})
547-
await WithInstance.provide({
548-
directory: tmp.path,
549-
fn: async () => {
550-
const providers = await list()
551-
expect(providers[ProviderID.make("custom-openai")]).toBeDefined()
552-
expect(providers[ProviderID.make("custom-openai")].options.baseURL).toBe("https://custom.openai.com/v1")
542+
options: {
543+
apiKey: "test-key",
544+
baseURL: "https://custom.openai.com/v1",
545+
},
546+
},
547+
},
553548
},
554-
})
555-
})
556-
557-
test("model cost defaults to zero when not specified", async () => {
558-
await using tmp = await tmpdir({
559-
init: async (dir) => {
560-
await Bun.write(
561-
path.join(dir, "opencode.json"),
562-
JSON.stringify({
563-
$schema: "https://opencode.ai/config.json",
564-
provider: {
565-
"test-provider": {
566-
name: "Test Provider",
567-
npm: "@ai-sdk/openai-compatible",
568-
env: [],
569-
models: {
570-
"test-model": {
571-
name: "Test Model",
572-
tool_call: true,
573-
limit: { context: 128000, output: 4096 },
574-
},
575-
},
576-
options: {
577-
apiKey: "test-key",
578-
},
549+
},
550+
)
551+
552+
it.instance(
553+
"model cost defaults to zero when not specified",
554+
Effect.gen(function* () {
555+
const providers = yield* Provider.Service.use((provider) => provider.list())
556+
const model = providers[ProviderID.make("test-provider")].models["test-model"]
557+
expect(model.cost.input).toBe(0)
558+
expect(model.cost.output).toBe(0)
559+
expect(model.cost.cache.read).toBe(0)
560+
expect(model.cost.cache.write).toBe(0)
561+
}),
562+
{
563+
config: {
564+
provider: {
565+
"test-provider": {
566+
name: "Test Provider",
567+
npm: "@ai-sdk/openai-compatible",
568+
env: [],
569+
models: {
570+
"test-model": {
571+
name: "Test Model",
572+
tool_call: true,
573+
limit: { context: 128000, output: 4096 },
579574
},
580575
},
581-
}),
582-
)
583-
},
584-
})
585-
await WithInstance.provide({
586-
directory: tmp.path,
587-
fn: async () => {
588-
const providers = await list()
589-
const model = providers[ProviderID.make("test-provider")].models["test-model"]
590-
expect(model.cost.input).toBe(0)
591-
expect(model.cost.output).toBe(0)
592-
expect(model.cost.cache.read).toBe(0)
593-
expect(model.cost.cache.write).toBe(0)
576+
options: {
577+
apiKey: "test-key",
578+
},
579+
},
580+
},
594581
},
595-
})
596-
})
582+
},
583+
)
597584

598585
test("model options are merged from existing model", async () => {
599586
await using tmp = await tmpdir({

0 commit comments

Comments
 (0)