From ba8c920639e1cda59edcc5a559a6636991eaf629 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 8 May 2026 20:57:36 +0000 Subject: [PATCH] chore: generate --- packages/http-recorder/src/cassette.ts | 5 ++++- packages/http-recorder/src/redaction.ts | 6 +++++- packages/http-recorder/test/record-replay.test.ts | 6 ++---- packages/llm/AGENTS.md | 2 +- packages/llm/script/setup-recording-env.ts | 7 ++++++- packages/llm/src/llm.ts | 13 ++++--------- packages/llm/src/providers/anthropic.ts | 6 ++++-- packages/llm/src/providers/google.ts | 6 ++++-- packages/llm/src/route/auth.ts | 5 +++-- packages/llm/src/route/client.ts | 4 +--- packages/llm/src/route/endpoint.ts | 4 +--- packages/llm/test/endpoint.test.ts | 11 ++++------- ...eway-workers-ai-gpt-oss-20b-tools-tool-call.json | 8 +------- ...are-ai-gateway-workers-ai-llama-3-1-8b-text.json | 7 +------ ...lare-workers-ai-gpt-oss-20b-tools-tool-call.json | 8 +------- .../cloudflare-workers-ai-llama-3-1-8b-text.json | 7 +------ packages/llm/test/generate-object.test.ts | 5 ++++- packages/llm/test/provider/cloudflare.test.ts | 4 +--- packages/llm/test/schema.test.ts | 10 +--------- 19 files changed, 49 insertions(+), 75 deletions(-) diff --git a/packages/http-recorder/src/cassette.ts b/packages/http-recorder/src/cassette.ts index 23f1ba4e68..769bcc7c70 100644 --- a/packages/http-recorder/src/cassette.ts +++ b/packages/http-recorder/src/cassette.ts @@ -90,7 +90,10 @@ export const layer = (options: { readonly directory?: string } = {}) => return (yield* walk(directory)) .filter((file) => file.endsWith(".json")) .map((file) => ({ - name: path.relative(directory, file).replace(/\\/g, "/").replace(/\.json$/, ""), + name: path + .relative(directory, file) + .replace(/\\/g, "/") + .replace(/\.json$/, ""), path: file, })) .toSorted((a, b) => a.name.localeCompare(b.name)) diff --git a/packages/http-recorder/src/redaction.ts b/packages/http-recorder/src/redaction.ts index 062ea61dc7..3a8b097839 100644 --- 
a/packages/http-recorder/src/redaction.ts +++ b/packages/http-recorder/src/redaction.ts @@ -65,7 +65,11 @@ const redactionSet = (values: ReadonlyArray | undefined, defaults: Reado export type UrlRedactor = (url: string) => string -export const redactUrl = (raw: string, query: ReadonlyArray = DEFAULT_REDACT_QUERY, urlRedactor?: UrlRedactor) => { +export const redactUrl = ( + raw: string, + query: ReadonlyArray = DEFAULT_REDACT_QUERY, + urlRedactor?: UrlRedactor, +) => { if (!URL.canParse(raw)) return urlRedactor?.(raw) ?? raw const url = new URL(raw) if (url.username) url.username = REDACTED diff --git a/packages/http-recorder/test/record-replay.test.ts b/packages/http-recorder/test/record-replay.test.ts index 2f118a88c1..676422e6a4 100644 --- a/packages/http-recorder/test/record-replay.test.ts +++ b/packages/http-recorder/test/record-replay.test.ts @@ -64,10 +64,8 @@ describe("http-recorder", () => { test("applies custom URL redaction after built-in redaction", () => { expect( - HttpRecorder.redactUrl( - "https://example.test/accounts/real-account/path?key=secret-key", - undefined, - (url) => url.replace("/accounts/real-account/", "/accounts/{account}/"), + HttpRecorder.redactUrl("https://example.test/accounts/real-account/path?key=secret-key", undefined, (url) => + url.replace("/accounts/real-account/", "/accounts/{account}/"), ), ).toBe("https://example.test/accounts/{account}/path?key=%5BREDACTED%5D") }) diff --git a/packages/llm/AGENTS.md b/packages/llm/AGENTS.md index 61d57cf06b..b20847da3b 100644 --- a/packages/llm/AGENTS.md +++ b/packages/llm/AGENTS.md @@ -28,7 +28,7 @@ const request = LLM.request({ prompt: "Say hello.", }) -const response = yield* LLMClient.generate(request) +const response = yield* LLMClient.generate(request) ``` `LLM.request(...)` builds an `LLMRequest`. 
`LLMClient.generate(...)` selects a registered route by `request.model.route`, builds the provider-native body, asks the route's transport for a real `HttpClientRequest.HttpClientRequest`, sends it through `RequestExecutor.Service`, parses the provider stream into common `LLMEvent`s, and finally returns an `LLMResponse`. diff --git a/packages/llm/script/setup-recording-env.ts b/packages/llm/script/setup-recording-env.ts index 945f2b2ada..d32769b3ce 100644 --- a/packages/llm/script/setup-recording-env.ts +++ b/packages/llm/script/setup-recording-env.ts @@ -110,7 +110,12 @@ const PROVIDERS: ReadonlyArray = [ note: "Cloudflare Unified/OpenAI-compatible gateway; supports provider/model ids like workers-ai/@cf/...", vars: [ { name: "CLOUDFLARE_ACCOUNT_ID", label: "Cloudflare account ID", secret: false }, - { name: "CLOUDFLARE_GATEWAY_ID", label: "Cloudflare AI Gateway ID (defaults to default)", optional: true, secret: false }, + { + name: "CLOUDFLARE_GATEWAY_ID", + label: "Cloudflare AI Gateway ID (defaults to default)", + optional: true, + secret: false, + }, { name: "CLOUDFLARE_API_TOKEN", label: "Cloudflare AI Gateway token" }, ], validate: (env) => diff --git a/packages/llm/src/llm.ts b/packages/llm/src/llm.ts index 21d88302ed..bca78c888a 100644 --- a/packages/llm/src/llm.ts +++ b/packages/llm/src/llm.ts @@ -1,10 +1,5 @@ import { Effect, JsonSchema, Schema } from "effect" -import { - LLMClient, - modelLimits, - modelRef, - type ModelRefInput, -} from "./route/client" +import { LLMClient, modelLimits, modelRef, type ModelRefInput } from "./route/client" import { GenerationOptions, HttpOptions, @@ -196,10 +191,10 @@ const runGenerateObject = Effect.fn("LLM.generateObject")(function* ( export function generateObject>( options: GenerateObjectOptions, ): Effect.Effect>, LLMError> -export function generateObject(options: GenerateObjectDynamicOptions): Effect.Effect, LLMError> export function generateObject( - options: GenerateObjectOptions> | GenerateObjectDynamicOptions, 
-) { + options: GenerateObjectDynamicOptions, +): Effect.Effect, LLMError> +export function generateObject(options: GenerateObjectOptions> | GenerateObjectDynamicOptions) { if ("schema" in options) { const { schema, ...rest } = options return runGenerateObject( diff --git a/packages/llm/src/providers/anthropic.ts b/packages/llm/src/providers/anthropic.ts index a7ec7ede9f..cca12bf7c2 100644 --- a/packages/llm/src/providers/anthropic.ts +++ b/packages/llm/src/providers/anthropic.ts @@ -7,8 +7,10 @@ export const id = ProviderID.make("anthropic") export const routes = [AnthropicMessages.route] -export const model = (id: string | ModelID, options: Omit & { readonly baseURL?: string } = {}) => - AnthropicMessages.model({ ...options, id }) +export const model = ( + id: string | ModelID, + options: Omit & { readonly baseURL?: string } = {}, +) => AnthropicMessages.model({ ...options, id }) export const provider = Provider.make({ id, diff --git a/packages/llm/src/providers/google.ts b/packages/llm/src/providers/google.ts index d63439bfec..c03b9a7c25 100644 --- a/packages/llm/src/providers/google.ts +++ b/packages/llm/src/providers/google.ts @@ -7,8 +7,10 @@ export const id = ProviderID.make("google") export const routes = [Gemini.route] -export const model = (id: string | ModelID, options: Omit & { readonly baseURL?: string } = {}) => - Gemini.model({ ...options, id }) +export const model = ( + id: string | ModelID, + options: Omit & { readonly baseURL?: string } = {}, +) => Gemini.model({ ...options, id }) export const provider = Provider.make({ id, diff --git a/packages/llm/src/route/auth.ts b/packages/llm/src/route/auth.ts index 540c2845f2..b46e223363 100644 --- a/packages/llm/src/route/auth.ts +++ b/packages/llm/src/route/auth.ts @@ -168,8 +168,9 @@ export function bearerHeader( name: string, source?: string | Redacted.Redacted | Config.Config> | Credential, ) { - const render = (input: string | Redacted.Redacted | Config.Config> | Credential) => - 
fromCredential(credentialInput(input), (secret) => ({ [name]: `Bearer ${secret}` })) + const render = ( + input: string | Redacted.Redacted | Config.Config> | Credential, + ) => fromCredential(credentialInput(input), (secret) => ({ [name]: `Bearer ${secret}` })) if (source === undefined) return render return render(source) } diff --git a/packages/llm/src/route/client.ts b/packages/llm/src/route/client.ts index 0b9d92cecb..734eedff21 100644 --- a/packages/llm/src/route/client.ts +++ b/packages/llm/src/route/client.ts @@ -142,9 +142,7 @@ const modelWithDefaults = if (!provider) throw new Error(`Route.model(${route.id}) requires a provider`) const baseURL = mapped.baseURL ?? defaults.baseURL ?? route.defaults.baseURL if (!baseURL) - throw new Error( - `Route.model(${route.id}) requires a baseURL — supply it via input, defaults, or route defaults`, - ) + throw new Error(`Route.model(${route.id}) requires a baseURL — supply it via input, defaults, or route defaults`) const generation = mergeGenerationOptions(route.defaults.generation, defaults.generation) const providerOptions = mergeProviderOptions(route.defaults.providerOptions, defaults.providerOptions) const http = mergeHttpOptions(httpOptions(route.defaults.http), httpOptions(defaults.http)) diff --git a/packages/llm/src/route/endpoint.ts b/packages/llm/src/route/endpoint.ts index 71d551893d..361ad508e1 100644 --- a/packages/llm/src/route/endpoint.ts +++ b/packages/llm/src/route/endpoint.ts @@ -30,9 +30,7 @@ const renderPart = (part: EndpointPart, input: EndpointInput) typeof part === "function" ? 
part(input) : part export const render = (endpoint: Endpoint, input: EndpointInput) => { - const url = new URL( - `${ProviderShared.trimBaseUrl(input.request.model.baseURL)}${renderPart(endpoint.path, input)}`, - ) + const url = new URL(`${ProviderShared.trimBaseUrl(input.request.model.baseURL)}${renderPart(endpoint.path, input)}`) const params = input.request.model.queryParams if (params) for (const [key, value] of Object.entries(params)) url.searchParams.set(key, value) return url diff --git a/packages/llm/test/endpoint.test.ts b/packages/llm/test/endpoint.test.ts index f708a87ea0..43d2e1c5c4 100644 --- a/packages/llm/test/endpoint.test.ts +++ b/packages/llm/test/endpoint.test.ts @@ -2,12 +2,7 @@ import { describe, expect, test } from "bun:test" import { LLM } from "../src" import { Endpoint } from "../src/route" -const request = ( - input: { - readonly baseURL: string - readonly queryParams?: Record - }, -) => +const request = (input: { readonly baseURL: string; readonly queryParams?: Record }) => LLM.request({ model: LLM.model({ id: "model-1", @@ -43,7 +38,9 @@ describe("Endpoint", () => { test("path may be a function of the validated body", () => { const url = Endpoint.render( - Endpoint.path<{ readonly modelId: string }>(({ body }) => `/model/${encodeURIComponent(body.modelId)}/converse-stream`), + Endpoint.path<{ readonly modelId: string }>( + ({ body }) => `/model/${encodeURIComponent(body.modelId)}/converse-stream`, + ), { request: request({ baseURL: "https://bedrock-runtime.us-east-1.amazonaws.com" }), body: { modelId: "us.amazon.nova-micro-v1:0" }, diff --git a/packages/llm/test/fixtures/recordings/cloudflare-ai-gateway/cloudflare-ai-gateway-workers-ai-gpt-oss-20b-tools-tool-call.json b/packages/llm/test/fixtures/recordings/cloudflare-ai-gateway/cloudflare-ai-gateway-workers-ai-gpt-oss-20b-tools-tool-call.json index 80ade53b9c..981c14f03e 100644 --- 
a/packages/llm/test/fixtures/recordings/cloudflare-ai-gateway/cloudflare-ai-gateway-workers-ai-gpt-oss-20b-tools-tool-call.json +++ b/packages/llm/test/fixtures/recordings/cloudflare-ai-gateway/cloudflare-ai-gateway-workers-ai-gpt-oss-20b-tools-tool-call.json @@ -7,13 +7,7 @@ "route": "cloudflare-ai-gateway", "transport": "http", "model": "workers-ai/@cf/openai/gpt-oss-20b", - "tags": [ - "prefix:cloudflare-ai-gateway", - "provider:cloudflare-ai-gateway", - "tool", - "tool-call", - "golden" - ] + "tags": ["prefix:cloudflare-ai-gateway", "provider:cloudflare-ai-gateway", "tool", "tool-call", "golden"] }, "interactions": [ { diff --git a/packages/llm/test/fixtures/recordings/cloudflare-ai-gateway/cloudflare-ai-gateway-workers-ai-llama-3-1-8b-text.json b/packages/llm/test/fixtures/recordings/cloudflare-ai-gateway/cloudflare-ai-gateway-workers-ai-llama-3-1-8b-text.json index ff535b578b..6a8eff09d9 100644 --- a/packages/llm/test/fixtures/recordings/cloudflare-ai-gateway/cloudflare-ai-gateway-workers-ai-llama-3-1-8b-text.json +++ b/packages/llm/test/fixtures/recordings/cloudflare-ai-gateway/cloudflare-ai-gateway-workers-ai-llama-3-1-8b-text.json @@ -7,12 +7,7 @@ "route": "cloudflare-ai-gateway", "transport": "http", "model": "workers-ai/@cf/meta/llama-3.1-8b-instruct", - "tags": [ - "prefix:cloudflare-ai-gateway", - "provider:cloudflare-ai-gateway", - "text", - "golden" - ] + "tags": ["prefix:cloudflare-ai-gateway", "provider:cloudflare-ai-gateway", "text", "golden"] }, "interactions": [ { diff --git a/packages/llm/test/fixtures/recordings/cloudflare-workers-ai/cloudflare-workers-ai-gpt-oss-20b-tools-tool-call.json b/packages/llm/test/fixtures/recordings/cloudflare-workers-ai/cloudflare-workers-ai-gpt-oss-20b-tools-tool-call.json index 2c973bffe1..fa22f1ddb9 100644 --- a/packages/llm/test/fixtures/recordings/cloudflare-workers-ai/cloudflare-workers-ai-gpt-oss-20b-tools-tool-call.json +++ 
b/packages/llm/test/fixtures/recordings/cloudflare-workers-ai/cloudflare-workers-ai-gpt-oss-20b-tools-tool-call.json @@ -7,13 +7,7 @@ "route": "cloudflare-workers-ai", "transport": "http", "model": "@cf/openai/gpt-oss-20b", - "tags": [ - "prefix:cloudflare-workers-ai", - "provider:cloudflare-workers-ai", - "tool", - "tool-call", - "golden" - ] + "tags": ["prefix:cloudflare-workers-ai", "provider:cloudflare-workers-ai", "tool", "tool-call", "golden"] }, "interactions": [ { diff --git a/packages/llm/test/fixtures/recordings/cloudflare-workers-ai/cloudflare-workers-ai-llama-3-1-8b-text.json b/packages/llm/test/fixtures/recordings/cloudflare-workers-ai/cloudflare-workers-ai-llama-3-1-8b-text.json index 4ed314e15f..52cc25f86b 100644 --- a/packages/llm/test/fixtures/recordings/cloudflare-workers-ai/cloudflare-workers-ai-llama-3-1-8b-text.json +++ b/packages/llm/test/fixtures/recordings/cloudflare-workers-ai/cloudflare-workers-ai-llama-3-1-8b-text.json @@ -7,12 +7,7 @@ "route": "cloudflare-workers-ai", "transport": "http", "model": "@cf/meta/llama-3.1-8b-instruct", - "tags": [ - "prefix:cloudflare-workers-ai", - "provider:cloudflare-workers-ai", - "text", - "golden" - ] + "tags": ["prefix:cloudflare-workers-ai", "provider:cloudflare-workers-ai", "text", "golden"] }, "interactions": [ { diff --git a/packages/llm/test/generate-object.test.ts b/packages/llm/test/generate-object.test.ts index a9e6b5bf7a..66e39f7770 100644 --- a/packages/llm/test/generate-object.test.ts +++ b/packages/llm/test/generate-object.test.ts @@ -164,7 +164,10 @@ describe("LLM.generateObject", () => { const layer = dynamicResponse((input) => Effect.sync(() => input.respond( - sseEvents(toolCallChunk("call_1", "generate_object", '{"value":"not-a-number"}'), finishChunk("tool_calls")), + sseEvents( + toolCallChunk("call_1", "generate_object", '{"value":"not-a-number"}'), + finishChunk("tool_calls"), + ), { headers: { "content-type": "text/event-stream" } }, ), ), diff --git 
a/packages/llm/test/provider/cloudflare.test.ts b/packages/llm/test/provider/cloudflare.test.ts index 00b69fa18c..125e79bf9e 100644 --- a/packages/llm/test/provider/cloudflare.test.ts +++ b/packages/llm/test/provider/cloudflare.test.ts @@ -181,9 +181,7 @@ describe("Cloudflare", () => { dynamicResponse((input) => Effect.gen(function* () { const web = yield* HttpClientRequest.toWeb(input.request).pipe(Effect.orDie) - expect(web.url).toBe( - "https://api.cloudflare.com/client/v4/accounts/test-account/ai/v1/chat/completions", - ) + expect(web.url).toBe("https://api.cloudflare.com/client/v4/accounts/test-account/ai/v1/chat/completions") expect(web.headers.get("authorization")).toBe("Bearer test-token") expect(decodeJson(input.text)).toMatchObject({ model: "@cf/meta/llama-3.1-8b-instruct", diff --git a/packages/llm/test/schema.test.ts b/packages/llm/test/schema.test.ts index 1c9bbf1e09..46eb85b075 100644 --- a/packages/llm/test/schema.test.ts +++ b/packages/llm/test/schema.test.ts @@ -1,14 +1,6 @@ import { describe, expect, test } from "bun:test" import { Schema } from "effect" -import { - ContentPart, - LLMEvent, - LLMRequest, - ModelID, - ModelLimits, - ModelRef, - ProviderID, -} from "../src/schema" +import { ContentPart, LLMEvent, LLMRequest, ModelID, ModelLimits, ModelRef, ProviderID } from "../src/schema" const model = new ModelRef({ id: ModelID.make("fake-model"),