Compare commits

...

2 Commits

Author SHA1 Message Date
Kit Langton
613a8d1beb test(provider): migrate more config-backed cases (#26984) 2026-05-11 22:21:50 -04:00
Kit Langton
708557880b test(provider): migrate provider tests to effect runner 2026-05-11 22:13:26 -04:00

View File

@@ -15,6 +15,7 @@ import { Env } from "../../src/env"
import { Effect } from "effect"
import { AppRuntime } from "../../src/effect/app-runtime"
import { makeRuntime } from "../../src/effect/run-service"
import { testEffect } from "../lib/effect"
// NOTE(review): diff-rendered fragment — original leading indentation was stripped by the page scrape.
// Module-level Env runtime used by tests to mutate environment variables.
const env = makeRuntime(Env.Service, Env.defaultLayer)
// Synchronously sets env var `k` to `v` through the Env service (see uses of set("ANTHROPIC_API_KEY", ...) below).
const set = (k: string, v: string) => env.runSync((svc) => svc.set(k, v))
@@ -70,6 +71,8 @@ function paid(providers: Awaited<ReturnType<typeof list>>) {
return Object.values(item.models).filter((model) => model.cost.input > 0).length
}
// Effect-backed test runner bound to the Provider layer; the migrated cases below
// register through `it.instance(name, effect, { config })` instead of tmpdir/opencode.json setup.
const it = testEffect(Provider.defaultLayer)
test("provider loaded from env variable", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
@@ -515,144 +518,116 @@ test("defaultModel respects config model setting", async () => {
})
})
test("provider with baseURL from config", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
provider: {
"custom-openai": {
name: "Custom OpenAI",
npm: "@ai-sdk/openai-compatible",
env: [],
models: {
"gpt-4": {
name: "GPT-4",
tool_call: true,
limit: { context: 128000, output: 4096 },
},
},
// Verifies that a provider declared in config with a custom `options.baseURL`
// is surfaced by Provider.list() under its configured id with that baseURL intact.
// (Migrated from the old tmpdir + opencode.json setup to the effect runner's `config` fixture.)
it.instance(
"provider with baseURL from config",
Effect.gen(function* () {
const providers = yield* Provider.Service.use((provider) => provider.list())
// key must match the config entry below
expect(providers[ProviderID.make("custom-openai")]).toBeDefined()
expect(providers[ProviderID.make("custom-openai")].options.baseURL).toBe("https://custom.openai.com/v1")
}),
{
// Fixture config injected by the testEffect runner in place of a written opencode.json.
config: {
provider: {
"custom-openai": {
name: "Custom OpenAI",
npm: "@ai-sdk/openai-compatible",
env: [],
models: {
"gpt-4": {
name: "GPT-4",
tool_call: true,
limit: { context: 128000, output: 4096 },
},
},
options: {
apiKey: "test-key",
baseURL: "https://custom.openai.com/v1",
},
},
},
},
},
)
// Verifies that a model declared without any cost information gets every cost
// field (input, output, cache read, cache write) defaulted to zero by Provider.list().
it.instance(
"model cost defaults to zero when not specified",
Effect.gen(function* () {
const providers = yield* Provider.Service.use((provider) => provider.list())
const model = providers[ProviderID.make("test-provider")].models["test-model"]
// the fixture below omits `cost` entirely, so all four fields must be 0
expect(model.cost.input).toBe(0)
expect(model.cost.output).toBe(0)
expect(model.cost.cache.read).toBe(0)
expect(model.cost.cache.write).toBe(0)
}),
{
// Fixture config: one provider, one model, no `cost` key.
config: {
provider: {
"test-provider": {
name: "Test Provider",
npm: "@ai-sdk/openai-compatible",
env: [],
models: {
"test-model": {
name: "Test Model",
tool_call: true,
limit: { context: 128000, output: 4096 },
},
},
options: {
apiKey: "test-key",
},
},
},
},
},
)
it.instance(
"model options are merged from existing model",
Effect.gen(function* () {
const providers = yield* Provider.Service.use((provider) => provider.list())
const model = providers[ProviderID.anthropic].models["claude-sonnet-4-20250514"]
expect(model.options.customOption).toBe("custom-value")
}),
{
config: {
provider: {
anthropic: {
options: {
apiKey: "test-api-key",
},
models: {
"claude-sonnet-4-20250514": {
options: {
apiKey: "test-key",
baseURL: "https://custom.openai.com/v1",
customOption: "custom-value",
},
},
},
}),
)
},
},
},
})
await WithInstance.provide({
directory: tmp.path,
fn: async () => {
const providers = await list()
expect(providers[ProviderID.make("custom-openai")]).toBeDefined()
expect(providers[ProviderID.make("custom-openai")].options.baseURL).toBe("https://custom.openai.com/v1")
},
})
})
},
)
test("model cost defaults to zero when not specified", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
provider: {
"test-provider": {
name: "Test Provider",
npm: "@ai-sdk/openai-compatible",
env: [],
models: {
"test-model": {
name: "Test Model",
tool_call: true,
limit: { context: 128000, output: 4096 },
},
},
options: {
apiKey: "test-key",
},
},
it.instance(
"provider removed when all models filtered out",
Effect.gen(function* () {
const providers = yield* Provider.Service.use((provider) => provider.list())
expect(providers[ProviderID.anthropic]).toBeUndefined()
}),
{
config: {
provider: {
anthropic: {
options: {
apiKey: "test-api-key",
},
}),
)
whitelist: ["nonexistent-model"],
},
},
},
})
await WithInstance.provide({
directory: tmp.path,
fn: async () => {
const providers = await list()
const model = providers[ProviderID.make("test-provider")].models["test-model"]
expect(model.cost.input).toBe(0)
expect(model.cost.output).toBe(0)
expect(model.cost.cache.read).toBe(0)
expect(model.cost.cache.write).toBe(0)
},
})
})
test("model options are merged from existing model", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
provider: {
anthropic: {
models: {
"claude-sonnet-4-20250514": {
options: {
customOption: "custom-value",
},
},
},
},
},
}),
)
},
})
await WithInstance.provide({
directory: tmp.path,
fn: async () => {
set("ANTHROPIC_API_KEY", "test-api-key")
const providers = await list()
const model = providers[ProviderID.anthropic].models["claude-sonnet-4-20250514"]
expect(model.options.customOption).toBe("custom-value")
},
})
})
test("provider removed when all models filtered out", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
provider: {
anthropic: {
whitelist: ["nonexistent-model"],
},
},
}),
)
},
})
await WithInstance.provide({
directory: tmp.path,
fn: async () => {
set("ANTHROPIC_API_KEY", "test-api-key")
const providers = await list()
expect(providers[ProviderID.anthropic]).toBeUndefined()
},
})
})
},
)
test("closest finds model by partial match", async () => {
await using tmp = await tmpdir({