chore: generate

opencode-agent[bot]
2026-05-08 20:57:36 +00:00
parent 5bb7b23440
commit ba8c920639
19 changed files with 49 additions and 75 deletions

View File

@@ -90,7 +90,10 @@ export const layer = (options: { readonly directory?: string } = {}) =>
return (yield* walk(directory))
.filter((file) => file.endsWith(".json"))
.map((file) => ({
- name: path.relative(directory, file).replace(/\\/g, "/").replace(/\.json$/, ""),
+ name: path
+ .relative(directory, file)
+ .replace(/\\/g, "/")
+ .replace(/\.json$/, ""),
path: file,
}))
.toSorted((a, b) => a.name.localeCompare(b.name))

View File

@@ -65,7 +65,11 @@ const redactionSet = (values: ReadonlyArray<string> | undefined, defaults: Reado
export type UrlRedactor = (url: string) => string
- export const redactUrl = (raw: string, query: ReadonlyArray<string> = DEFAULT_REDACT_QUERY, urlRedactor?: UrlRedactor) => {
+ export const redactUrl = (
+ raw: string,
+ query: ReadonlyArray<string> = DEFAULT_REDACT_QUERY,
+ urlRedactor?: UrlRedactor,
+ ) => {
if (!URL.canParse(raw)) return urlRedactor?.(raw) ?? raw
const url = new URL(raw)
if (url.username) url.username = REDACTED

View File

@@ -64,10 +64,8 @@ describe("http-recorder", () => {
test("applies custom URL redaction after built-in redaction", () => {
expect(
- HttpRecorder.redactUrl(
- "https://example.test/accounts/real-account/path?key=secret-key",
- undefined,
- (url) => url.replace("/accounts/real-account/", "/accounts/{account}/"),
+ HttpRecorder.redactUrl("https://example.test/accounts/real-account/path?key=secret-key", undefined, (url) =>
+ url.replace("/accounts/real-account/", "/accounts/{account}/"),
),
).toBe("https://example.test/accounts/{account}/path?key=%5BREDACTED%5D")
})

View File

@@ -28,7 +28,7 @@ const request = LLM.request({
prompt: "Say hello.",
})
- const response = yield* LLMClient.generate(request)
+ const response = yield * LLMClient.generate(request)
```
`LLM.request(...)` builds an `LLMRequest`. `LLMClient.generate(...)` selects a registered route by `request.model.route`, builds the provider-native body, asks the route's transport for a real `HttpClientRequest.HttpClientRequest`, sends it through `RequestExecutor.Service`, parses the provider stream into common `LLMEvent`s, and finally returns an `LLMResponse`.
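
For orientation, here is a sketch of that flow as it would run inside `Effect.gen` — a hedged example, not code from this commit: the import path and the model values are placeholders, borrowed from the shapes in the test diffs further down. It also shows why the formatter changed the spacing in the hunk above: at the top level of a code snippet, `yield* x` can only parse as the multiplication `yield * x`, whereas inside `Effect.gen` it is real generator delegation.

```ts
import { Effect } from "effect"
// Assumed import path for illustration — the commit does not show
// where LLM and LLMClient are exported from.
import { LLM, LLMClient } from "./src"

// Placeholder model id and baseURL.
const program = Effect.gen(function* () {
  const request = LLM.request({
    model: LLM.model({ id: "model-1", baseURL: "https://example.test" }),
    prompt: "Say hello.",
  })
  // Inside Effect.gen, `yield*` delegates to the Effect returned by
  // generate(); outside a generator it would parse as multiplication,
  // which is what the formatter normalized in the snippet above.
  return yield* LLMClient.generate(request)
})
```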

View File

@@ -110,7 +110,12 @@ const PROVIDERS: ReadonlyArray<Provider> = [
note: "Cloudflare Unified/OpenAI-compatible gateway; supports provider/model ids like workers-ai/@cf/...",
vars: [
{ name: "CLOUDFLARE_ACCOUNT_ID", label: "Cloudflare account ID", secret: false },
{ name: "CLOUDFLARE_GATEWAY_ID", label: "Cloudflare AI Gateway ID (defaults to default)", optional: true, secret: false },
{
name: "CLOUDFLARE_GATEWAY_ID",
label: "Cloudflare AI Gateway ID (defaults to default)",
optional: true,
secret: false,
},
{ name: "CLOUDFLARE_API_TOKEN", label: "Cloudflare AI Gateway token" },
],
validate: (env) =>

View File

@@ -1,10 +1,5 @@
import { Effect, JsonSchema, Schema } from "effect"
- import {
- LLMClient,
- modelLimits,
- modelRef,
- type ModelRefInput,
- } from "./route/client"
+ import { LLMClient, modelLimits, modelRef, type ModelRefInput } from "./route/client"
import {
GenerationOptions,
HttpOptions,
@@ -196,10 +191,10 @@ const runGenerateObject = Effect.fn("LLM.generateObject")(function* (
export function generateObject<S extends ToolSchema<any>>(
options: GenerateObjectOptions<S>,
): Effect.Effect<GenerateObjectResponse<Schema.Schema.Type<S>>, LLMError>
- export function generateObject(options: GenerateObjectDynamicOptions): Effect.Effect<GenerateObjectResponse<unknown>, LLMError>
export function generateObject(
- options: GenerateObjectOptions<ToolSchema<any>> | GenerateObjectDynamicOptions,
- ) {
+ options: GenerateObjectDynamicOptions,
+ ): Effect.Effect<GenerateObjectResponse<unknown>, LLMError>
+ export function generateObject(options: GenerateObjectOptions<ToolSchema<any>> | GenerateObjectDynamicOptions) {
if ("schema" in options) {
const { schema, ...rest } = options
return runGenerateObject(

View File

@@ -7,8 +7,10 @@ export const id = ProviderID.make("anthropic")
export const routes = [AnthropicMessages.route]
- export const model = (id: string | ModelID, options: Omit<RouteModelInput, "id" | "baseURL"> & { readonly baseURL?: string } = {}) =>
- AnthropicMessages.model({ ...options, id })
+ export const model = (
+ id: string | ModelID,
+ options: Omit<RouteModelInput, "id" | "baseURL"> & { readonly baseURL?: string } = {},
+ ) => AnthropicMessages.model({ ...options, id })
export const provider = Provider.make({
id,

View File

@@ -7,8 +7,10 @@ export const id = ProviderID.make("google")
export const routes = [Gemini.route]
- export const model = (id: string | ModelID, options: Omit<RouteModelInput, "id" | "baseURL"> & { readonly baseURL?: string } = {}) =>
- Gemini.model({ ...options, id })
+ export const model = (
+ id: string | ModelID,
+ options: Omit<RouteModelInput, "id" | "baseURL"> & { readonly baseURL?: string } = {},
+ ) => Gemini.model({ ...options, id })
export const provider = Provider.make({
id,

View File

@@ -168,8 +168,9 @@ export function bearerHeader(
name: string,
source?: string | Redacted.Redacted<string> | Config.Config<string | Redacted.Redacted<string>> | Credential,
) {
- const render = (input: string | Redacted.Redacted<string> | Config.Config<string | Redacted.Redacted<string>> | Credential) =>
- fromCredential(credentialInput(input), (secret) => ({ [name]: `Bearer ${secret}` }))
+ const render = (
+ input: string | Redacted.Redacted<string> | Config.Config<string | Redacted.Redacted<string>> | Credential,
+ ) => fromCredential(credentialInput(input), (secret) => ({ [name]: `Bearer ${secret}` }))
if (source === undefined) return render
return render(source)
}

View File

@@ -142,9 +142,7 @@ const modelWithDefaults =
if (!provider) throw new Error(`Route.model(${route.id}) requires a provider`)
const baseURL = mapped.baseURL ?? defaults.baseURL ?? route.defaults.baseURL
if (!baseURL)
- throw new Error(
- `Route.model(${route.id}) requires a baseURL — supply it via input, defaults, or route defaults`,
- )
+ throw new Error(`Route.model(${route.id}) requires a baseURL — supply it via input, defaults, or route defaults`)
const generation = mergeGenerationOptions(route.defaults.generation, defaults.generation)
const providerOptions = mergeProviderOptions(route.defaults.providerOptions, defaults.providerOptions)
const http = mergeHttpOptions(httpOptions(route.defaults.http), httpOptions(defaults.http))

View File

@@ -30,9 +30,7 @@ const renderPart = <Body>(part: EndpointPart<Body>, input: EndpointInput<Body>)
typeof part === "function" ? part(input) : part
export const render = <Body>(endpoint: Endpoint<Body>, input: EndpointInput<Body>) => {
- const url = new URL(
- `${ProviderShared.trimBaseUrl(input.request.model.baseURL)}${renderPart(endpoint.path, input)}`,
- )
+ const url = new URL(`${ProviderShared.trimBaseUrl(input.request.model.baseURL)}${renderPart(endpoint.path, input)}`)
const params = input.request.model.queryParams
if (params) for (const [key, value] of Object.entries(params)) url.searchParams.set(key, value)
return url

View File

@@ -2,12 +2,7 @@ import { describe, expect, test } from "bun:test"
import { LLM } from "../src"
import { Endpoint } from "../src/route"
- const request = (
- input: {
- readonly baseURL: string
- readonly queryParams?: Record<string, string>
- },
- ) =>
+ const request = (input: { readonly baseURL: string; readonly queryParams?: Record<string, string> }) =>
LLM.request({
model: LLM.model({
id: "model-1",
@@ -43,7 +38,9 @@ describe("Endpoint", () => {
test("path may be a function of the validated body", () => {
const url = Endpoint.render(
- Endpoint.path<{ readonly modelId: string }>(({ body }) => `/model/${encodeURIComponent(body.modelId)}/converse-stream`),
+ Endpoint.path<{ readonly modelId: string }>(
+ ({ body }) => `/model/${encodeURIComponent(body.modelId)}/converse-stream`,
+ ),
{
request: request({ baseURL: "https://bedrock-runtime.us-east-1.amazonaws.com" }),
body: { modelId: "us.amazon.nova-micro-v1:0" },

View File

@@ -7,13 +7,7 @@
"route": "cloudflare-ai-gateway",
"transport": "http",
"model": "workers-ai/@cf/openai/gpt-oss-20b",
"tags": [
"prefix:cloudflare-ai-gateway",
"provider:cloudflare-ai-gateway",
"tool",
"tool-call",
"golden"
]
"tags": ["prefix:cloudflare-ai-gateway", "provider:cloudflare-ai-gateway", "tool", "tool-call", "golden"]
},
"interactions": [
{

View File

@@ -7,12 +7,7 @@
"route": "cloudflare-ai-gateway",
"transport": "http",
"model": "workers-ai/@cf/meta/llama-3.1-8b-instruct",
"tags": [
"prefix:cloudflare-ai-gateway",
"provider:cloudflare-ai-gateway",
"text",
"golden"
]
"tags": ["prefix:cloudflare-ai-gateway", "provider:cloudflare-ai-gateway", "text", "golden"]
},
"interactions": [
{

View File

@@ -7,13 +7,7 @@
"route": "cloudflare-workers-ai",
"transport": "http",
"model": "@cf/openai/gpt-oss-20b",
"tags": [
"prefix:cloudflare-workers-ai",
"provider:cloudflare-workers-ai",
"tool",
"tool-call",
"golden"
]
"tags": ["prefix:cloudflare-workers-ai", "provider:cloudflare-workers-ai", "tool", "tool-call", "golden"]
},
"interactions": [
{

View File

@@ -7,12 +7,7 @@
"route": "cloudflare-workers-ai",
"transport": "http",
"model": "@cf/meta/llama-3.1-8b-instruct",
"tags": [
"prefix:cloudflare-workers-ai",
"provider:cloudflare-workers-ai",
"text",
"golden"
]
"tags": ["prefix:cloudflare-workers-ai", "provider:cloudflare-workers-ai", "text", "golden"]
},
"interactions": [
{

View File

@@ -164,7 +164,10 @@ describe("LLM.generateObject", () => {
const layer = dynamicResponse((input) =>
Effect.sync(() =>
input.respond(
- sseEvents(toolCallChunk("call_1", "generate_object", '{"value":"not-a-number"}'), finishChunk("tool_calls")),
+ sseEvents(
+ toolCallChunk("call_1", "generate_object", '{"value":"not-a-number"}'),
+ finishChunk("tool_calls"),
+ ),
{ headers: { "content-type": "text/event-stream" } },
),
),

View File

@@ -181,9 +181,7 @@ describe("Cloudflare", () => {
dynamicResponse((input) =>
Effect.gen(function* () {
const web = yield* HttpClientRequest.toWeb(input.request).pipe(Effect.orDie)
- expect(web.url).toBe(
- "https://api.cloudflare.com/client/v4/accounts/test-account/ai/v1/chat/completions",
- )
+ expect(web.url).toBe("https://api.cloudflare.com/client/v4/accounts/test-account/ai/v1/chat/completions")
expect(web.headers.get("authorization")).toBe("Bearer test-token")
expect(decodeJson(input.text)).toMatchObject({
model: "@cf/meta/llama-3.1-8b-instruct",

View File

@@ -1,14 +1,6 @@
import { describe, expect, test } from "bun:test"
import { Schema } from "effect"
- import {
- ContentPart,
- LLMEvent,
- LLMRequest,
- ModelID,
- ModelLimits,
- ModelRef,
- ProviderID,
- } from "../src/schema"
+ import { ContentPart, LLMEvent, LLMRequest, ModelID, ModelLimits, ModelRef, ProviderID } from "../src/schema"
const model = new ModelRef({
id: ModelID.make("fake-model"),