Compare commits

..

2 Commits

Author SHA1 Message Date
Kit Langton
5a413edc8a test(shell-job): make shell command fixtures portable 2026-04-13 19:28:21 -04:00
Kit Langton
8f1c6e08a7 feat(shell-job): add isolated shell job service 2026-04-13 19:17:07 -04:00
39 changed files with 1566 additions and 2457 deletions

View File

@@ -371,7 +371,6 @@
"bonjour-service": "1.3.0",
"bun-pty": "0.4.8",
"chokidar": "4.0.3",
"cli-sound": "1.1.3",
"clipboardy": "4.0.0",
"cross-spawn": "catalog:",
"decimal.js": "10.5.0",
@@ -2669,8 +2668,6 @@
"cli-cursor": ["cli-cursor@3.1.0", "", { "dependencies": { "restore-cursor": "^3.1.0" } }, "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw=="],
"cli-sound": ["cli-sound@1.1.3", "", { "dependencies": { "find-exec": "^1.0.3" }, "bin": { "cli-sound": "dist/esm/cli.js" } }, "sha512-dpdF3KS3wjo1fobKG5iU9KyKqzQWAqueymHzZ9epus/dZ40487gAvS6aXFeBul+GiQAQYUTAtUWgQvw6Jftbyg=="],
"cli-spinners": ["cli-spinners@3.4.0", "", {}, "sha512-bXfOC4QcT1tKXGorxL3wbJm6XJPDqEnij2gQ2m7ESQuE+/z9YFIWnl/5RpTiKWbMq3EVKR4fRLJGn6DVfu0mpw=="],
"cli-truncate": ["cli-truncate@4.0.0", "", { "dependencies": { "slice-ansi": "^5.0.0", "string-width": "^7.0.0" } }, "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA=="],
@@ -3095,8 +3092,6 @@
"find-babel-config": ["find-babel-config@2.1.2", "", { "dependencies": { "json5": "^2.2.3" } }, "sha512-ZfZp1rQyp4gyuxqt1ZqjFGVeVBvmpURMqdIWXbPRfB97Bf6BzdK/xSIbylEINzQ0kB5tlDQfn9HkNXXWsqTqLg=="],
"find-exec": ["find-exec@1.0.3", "", { "dependencies": { "shell-quote": "^1.8.1" } }, "sha512-gnG38zW90mS8hm5smNcrBnakPEt+cGJoiMkJwCU0IYnEb0H2NQk0NIljhNW+48oniCriFek/PH6QXbwsJo/qug=="],
"find-my-way": ["find-my-way@9.5.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-querystring": "^1.0.0", "safe-regex2": "^5.0.0" } }, "sha512-VW2RfnmscZO5KgBY5XVyKREMW5nMZcxDy+buTOsL+zIPnBlbKm+00sgzoQzq1EVh4aALZLfKdwv6atBGcjvjrQ=="],
"find-my-way-ts": ["find-my-way-ts@0.1.6", "", {}, "sha512-a85L9ZoXtNAey3Y6Z+eBWW658kO/MwR7zIafkIUPUMf3isZG0NCs2pjW2wtjxAKuJPxMAsHUIP4ZPGv0o5gyTA=="],
@@ -4417,8 +4412,6 @@
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
"shell-quote": ["shell-quote@1.8.3", "", {}, "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw=="],
"shiki": ["shiki@3.20.0", "", { "dependencies": { "@shikijs/core": "3.20.0", "@shikijs/engine-javascript": "3.20.0", "@shikijs/engine-oniguruma": "3.20.0", "@shikijs/langs": "3.20.0", "@shikijs/themes": "3.20.0", "@shikijs/types": "3.20.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-kgCOlsnyWb+p0WU+01RjkCH+eBVsjL1jOwUYWv0YDWkM2/A46+LDKVs5yZCUXjJG6bj4ndFoAg5iLIIue6dulg=="],
"shikiji": ["shikiji@0.6.13", "", { "dependencies": { "hast-util-to-html": "^9.0.0" } }, "sha512-4T7X39csvhT0p7GDnq9vysWddf2b6BeioiN3Ymhnt3xcy9tXmDcnsEFVxX18Z4YcQgEE/w48dLJ4pPPUcG9KkA=="],

View File

@@ -128,7 +128,6 @@
"bonjour-service": "1.3.0",
"bun-pty": "0.4.8",
"chokidar": "4.0.3",
"cli-sound": "1.1.3",
"clipboardy": "4.0.0",
"cross-spawn": "catalog:",
"decimal.js": "10.5.0",

View File

@@ -104,19 +104,6 @@ Introduce one small `HttpApi` group for plain JSON endpoints only. Good initial
Avoid `session.ts`, SSE, websocket, and TUI-facing routes first.
Recommended first slice:
- start with `question`
- start with `GET /question`
- start with `POST /question/:requestID/reply`
Why `question` first:
- already JSON-only
- already delegates into an Effect service
- proves list + mutation + params + payload + OpenAPI in one small slice
- avoids the harder streaming and middleware cases
### 3. Reuse existing services
Do not re-architect business logic during the HTTP migration. `HttpApi` handlers should call the same Effect services already used by the Hono handlers.
@@ -134,259 +121,13 @@ Prefer mounting an experimental `HttpApi` surface alongside the existing Hono ro
If the parallel slice works well, migrate additional JSON route groups one at a time. Leave streaming-style endpoints on Hono until there is a clear reason to move them.
## Schema rule for HttpApi work
## Proposed first steps
Every `HttpApi` slice should follow `specs/effect/schema.md` and the Schema -> Zod interop rule in `specs/effect/migration.md`.
Default rule:
- Effect Schema owns the type
- `.zod` exists only as a compatibility surface
- do not introduce a new hand-written Zod schema for a type that is already migrating to Effect Schema
Practical implication for `HttpApi` migration:
- if a route boundary already depends on a shared DTO, ID, input, output, or tagged error, migrate that model to Effect Schema first or in the same change
- if an existing Hono route or tool still needs Zod, derive it with `@/util/effect-zod`
- avoid maintaining parallel Zod and Effect definitions for the same request or response type
Ordering for a route-group migration:
1. move implicated shared `schema.ts` leaf types to Effect Schema first
2. move exported `Info` / `Input` / `Output` route DTOs to Effect Schema
3. move tagged route-facing errors to `Schema.TaggedErrorClass` where needed
4. switch existing Zod boundary validators to derived `.zod`
5. define the `HttpApi` contract from the canonical Effect schemas
Temporary exception:
- it is acceptable to keep a route-local Zod schema for the first spike only when the type is boundary-local and migrating it would create unrelated churn
- if that happens, leave a short note so the type does not become a permanent second source of truth
## First vertical slice
The first `HttpApi` spike should be intentionally small and repeatable.
Chosen slice:
- group: `question`
- endpoints: `GET /question` and `POST /question/:requestID/reply`
Non-goals:
- no `session` routes
- no SSE or websocket routes
- no auth redesign
- no broad service refactor
Behavior rule:
- preserve current runtime behavior first
- treat semantic changes such as introducing new `404` behavior as a separate follow-up unless they are required to make the contract honest
Add `POST /question/:requestID/reject` only after the first two endpoints work cleanly.
## Repeatable slice template
Use the same sequence for each route group.
1. Pick one JSON-only route group that already mostly delegates into services.
2. Identify the shared DTOs, IDs, and errors implicated by that slice.
3. Apply the schema migration ordering above so those types are Effect Schema-first.
4. Define the `HttpApi` contract separately from the handlers.
5. Implement handlers by yielding the existing service from context.
6. Mount the new surface in parallel under an experimental prefix.
7. Add one end-to-end test and one OpenAPI-focused test.
8. Compare ergonomics before migrating the next endpoint.
Rule of thumb:
- migrate one route group at a time
- migrate one or two endpoints first, not the whole file
- keep business logic in the existing service
- keep the first spike easy to delete if the experiment is not worth continuing
## Example structure
Placement rule:
- keep `HttpApi` code under `src/server`, not `src/effect`
- `src/effect` should stay focused on runtimes, layers, instance state, and shared Effect plumbing
- place each `HttpApi` slice next to the HTTP boundary it serves
- for instance-scoped routes, prefer `src/server/instance/httpapi/*`
- if control-plane routes ever migrate, prefer `src/server/control/httpapi/*`
Suggested file layout for a repeatable spike:
- `src/server/instance/httpapi/question.ts`
- `src/server/instance/httpapi/index.ts`
- `test/server/question-httpapi.test.ts`
- `test/server/question-httpapi-openapi.test.ts`
Suggested responsibilities:
- `question.ts` defines the `HttpApi` contract and `HttpApiBuilder.group(...)` handlers for the experimental slice
- `index.ts` combines experimental `HttpApi` groups and exposes the mounted handler or layer
- `question-httpapi.test.ts` proves the route works end-to-end against the real service
- `question-httpapi-openapi.test.ts` proves the generated OpenAPI is acceptable for the migrated endpoints
## Example migration shape
Each route-group spike should follow the same shape.
### 1. Contract
- define an experimental `HttpApi`
- define one `HttpApiGroup`
- define endpoint params, payload, success, and error schemas from canonical Effect schemas
- annotate summary, description, and operation ids explicitly so generated docs are stable
### 2. Handler layer
- implement with `HttpApiBuilder.group(api, groupName, ...)`
- yield the existing Effect service from context
- keep handler bodies thin
- keep transport mapping at the HTTP boundary only
### 3. Mounting
- mount under an experimental prefix such as `/experimental/httpapi`
- keep existing Hono routes unchanged
- expose separate OpenAPI output for the experimental slice first
- prefer serving the parallel experimental slice through an Effect-native server boundary (`HttpRouter.serve(...)`) instead of optimizing around Hono interop
- treat `HttpRouter.toWebHandler(...)` as the adapter path for embedding into the existing Hono server, not as the long-term target shape
### 4. Verification
- seed real state through the existing service
- call the experimental endpoints
- assert that the service behavior is unchanged
- assert that the generated OpenAPI contains the migrated paths and schemas
## Boundary composition
The first slices should keep the existing outer server composition and only replace the route contract and handler layer.
### Auth
- keep `AuthMiddleware` at the outer Hono app level
- do not duplicate auth checks inside each `HttpApi` group for the first parallel slices
- treat auth as an already-satisfied transport concern before the request reaches the `HttpApi` handler
Practical rule:
- if a route is currently protected by the shared server middleware stack, the experimental `HttpApi` route should stay mounted behind that same stack
### Instance and workspace lookup
- keep `WorkspaceRouterMiddleware` as the source of truth for resolving `directory`, `workspace`, and session-derived workspace context
- let that middleware provide `Instance.current` and `WorkspaceContext` before the request reaches the `HttpApi` handler
- keep the `HttpApi` handlers unaware of path-to-instance lookup details when the existing Hono middleware already handles them
Practical rule:
- `HttpApi` handlers should yield services from context and assume the correct instance has already been provided
- only move instance lookup into the `HttpApi` layer if we later decide to migrate the outer middleware boundary itself
### Error mapping
- keep domain and service errors typed in the service layer
- declare typed transport errors on the endpoint only when the route can actually return them intentionally
- prefer explicit endpoint-level error schemas over relying on the outer Hono `ErrorMiddleware` for expected route behavior
Practical rule:
- request decoding failures should remain transport-level `400`s
- storage or lookup failures that are part of the route contract should be declared as typed endpoint errors
- unexpected defects can still fall through to the outer error middleware while the slice is experimental
For the current parallel slices, this means:
- auth still composes outside `HttpApi`
- instance selection still composes outside `HttpApi`
- success payloads should be schema-defined from canonical Effect schemas
- known route errors should be modeled at the endpoint boundary incrementally instead of all at once
## Exit criteria for the spike
The first slice is successful if:
- the endpoints run in parallel with the current Hono routes
- the handlers reuse the existing Effect service
- request decoding and response shapes are schema-defined from canonical Effect schemas
- any remaining Zod boundary usage is derived from `.zod` or clearly temporary
- OpenAPI is generated from the `HttpApi` contract
- the tests are straightforward enough that the next slice feels mechanical
## Learnings from the question slice
The first parallel `question` spike gave us a concrete pattern to reuse.
- `Schema.Class` works well for route DTOs such as `Question.Request`, `Question.Info`, and `Question.Reply`.
- scalar or collection schemas such as `Question.Answer` should stay as schemas and use helpers like `withStatics(...)` instead of being forced into classes.
- if an `HttpApi` success schema uses `Schema.Class`, the handler or underlying service needs to return real schema instances rather than plain objects.
- internal event payloads can stay anonymous when we want to avoid adding extra named OpenAPI component churn for non-route shapes.
- the experimental slice should stay mounted in parallel and keep calling the existing service layer unchanged.
- compare generated OpenAPI semantically at the route and schema level; in the current setup the exported OpenAPI paths do not include the outer Hono mount prefix.
## Route inventory
Status legend:
- `done` - parallel `HttpApi` slice exists
- `next` - good near-term candidate
- `later` - possible, but not first wave
- `defer` - not a good early `HttpApi` target
Current instance route inventory:
- `question` - `done`
endpoints in slice: `GET /question`, `POST /question/:requestID/reply`
- `permission` - `done`
endpoints in slice: `GET /permission`, `POST /permission/:requestID/reply`
- `provider` - `next`
best next endpoint: `GET /provider/auth`
later endpoint: `GET /provider`
defer first-wave OAuth mutations
- `config` - `next`
best next endpoint: `GET /config/providers`
later endpoint: `GET /config`
defer `PATCH /config` for now
- `project` - `later`
best small reads: `GET /project`, `GET /project/current`
defer git-init mutation first
- `workspace` - `later`
best small reads: `GET /experimental/workspace/adaptor`, `GET /experimental/workspace`, `GET /experimental/workspace/status`
defer create/remove mutations first
- `file` - `later`
good JSON-only candidate set, but larger than the current first-wave slices
- `mcp` - `later`
has JSON-only endpoints, but interactive OAuth/auth flows make it a worse early fit
- `session` - `defer`
large, stateful, mixes CRUD with prompt/shell/command/share/revert flows and a streaming route
- `event` - `defer`
SSE only
- `global` - `defer`
mixed bag with SSE and process-level side effects
- `pty` - `defer`
websocket-heavy route surface
- `tui` - `defer`
queue-style UI bridge, weak early `HttpApi` fit
Recommended near-term sequence after the first spike:
1. `provider` auth read endpoint
2. `config` providers read endpoint
3. `project` read endpoints
4. `workspace` read endpoints
## Checklist
- [x] add one small spike that defines an `HttpApi` group for a simple JSON route set
- [x] use Effect Schema request / response types for that slice
- [x] keep the underlying service calls identical to the current handlers
- [x] compare generated OpenAPI against the current Hono/OpenAPI setup
- [x] document how auth, instance lookup, and error mapping would compose in the new stack
- [ ] add one small spike that defines an `HttpApi` group for a simple JSON route set
- [ ] use Effect Schema request / response types for that slice
- [ ] keep the underlying service calls identical to the current handlers
- [ ] compare generated OpenAPI against the current Hono/OpenAPI setup
- [ ] document how auth, instance lookup, and error mapping would compose in the new stack
- [ ] decide after the spike whether `HttpApi` should stay parallel, replace only some groups, or become the long-term default
## Rule of thumb

View File

@@ -1,4 +0,0 @@
// Ambient module declaration: importing a ".wav" asset resolves to a string
// (presumably the bundled file's path/URL — provided by the bundler; confirm).
declare module "*.wav" {
  const file: string
  export default file
}

View File

@@ -1,5 +1,4 @@
import { Server } from "../../server/server"
import { ExperimentalHttpApiServer } from "../../server/instance/httpapi/server"
import { cmd } from "./cmd"
import { withNetworkOptions, resolveNetworkOptions } from "../network"
import { Flag } from "../../flag/flag"
@@ -18,18 +17,8 @@ export const ServeCommand = cmd({
const opts = await resolveNetworkOptions(args)
const server = await Server.listen(opts)
console.log(`opencode server listening on http://${server.hostname}:${server.port}`)
const httpapi = Flag.OPENCODE_EXPERIMENTAL_HTTPAPI_PORT
? await ExperimentalHttpApiServer.listen({
hostname: opts.hostname,
port: Flag.OPENCODE_EXPERIMENTAL_HTTPAPI_PORT,
})
: undefined
if (httpapi) {
console.log(`experimental httpapi listening on http://${httpapi.hostname}:${httpapi.port}`)
}
await new Promise(() => {})
await httpapi?.stop()
await server.stop()
},
})

View File

@@ -1,630 +1,82 @@
import { BoxRenderable, MouseButton, MouseEvent, RGBA, TextAttributes } from "@opentui/core"
import { For, createMemo, createSignal, onCleanup, type JSX } from "solid-js"
import { TextAttributes, RGBA } from "@opentui/core"
import { For, type JSX } from "solid-js"
import { useTheme, tint } from "@tui/context/theme"
import { Sound } from "@tui/util/sound"
import { logo } from "@/cli/logo"
import { logo, marks } from "@/cli/logo"
// Shadow markers (rendered chars in parens):
// _ = full shadow cell (space with bg=shadow)
// ^ = letter top, shadow bottom (▀ with fg=letter, bg=shadow)
// ~ = shadow top only (▀ with fg=shadow)
// Tuning constants for the logo ripple/charge animation.
// Durations (LIFE, CHARGE, HOLD, SINK, LAG, SHIMMER_IN, TRACE_IN, GLOW_OUT)
// are in milliseconds; the rest are unitless gains, widths, and exponents
// consumed by the field functions below.
// NOTE(review): names are terse — meanings inferred from usage in this file;
// confirm before renaming.
const GAP = 1 // blank columns between the two logo halves
const WIDTH = 0.76 // gaussian width of the expanding ring edge
const GAIN = 2.3 // ring edge brightness gain
const FLASH = 2.15 // impact flash gain
const TRAIL = 0.28 // residual brightness inside the ring
const SWELL = 0.24 // broad swell gain behind the edge
const WIDE = 1.85 // gaussian width of the swell
const DRIFT = 1.45 // how far the swell lags behind the edge
const EXPAND = 1.62 // easing exponent for ring radius growth
const LIFE = 1020 // ring lifetime (ms)
const CHARGE = 3000 // hold time to reach full charge (ms)
const HOLD = 90 // delay before a hold starts charging (ms)
const SINK = 40 // delay before the charge starts dimming surroundings (ms)
const ARC = 2.2 // rotating arc gain while charging
const FORK = 1.2 // lightning-fork gain while charging
const DIM = 1.04 // background dimming while charging
const KICK = 0.86 // post-burst center dip gain
const LAG = 60 // delay of the shadow ("dusk") frame behind the live frame (ms)
const SUCK = 0.34 // post-burst inward dip gain
const SHIMMER_IN = 60 // ring age before shimmer appears (ms)
const SHIMMER_OUT = 2.8 // max lag behind the ring edge that still shimmers
const TRACE = 0.033 // base speed of the glyph outline tracer
const TAIL = 1.8 // tracer tail length (path steps)
const TRACE_IN = 200 // tracer fade-in time (ms)
const GLOW_OUT = 1600 // glyph bloom fade-out time (ms)
const PEAK = RGBA.fromInts(255, 255, 255) // hottest highlight colour
// One expanding ripple created by releasing a press.
type Ring = {
  x: number
  y: number
  at: number // creation timestamp (performance.now())
  force: number // brightness multiplier
  kick: number // center-dip multiplier
}
// An in-progress press/hold on the logo.
type Hold = {
  x: number
  y: number
  at: number // press timestamp
  glyph: number | undefined // id of the glyph under the press, if any
}
// State left behind when a hold is released.
type Release = {
  x: number
  y: number
  at: number // release timestamp
  glyph: number | undefined
  level: number // eased charge level frozen at release
  rise: number // raw charge progress frozen at release
}
// Per-glyph afterglow triggered on release.
type Glow = {
  glyph: number
  at: number
  force: number
}
// Snapshot of all animation state sampled for one render pass.
type Frame = {
  t: number // sample time
  list: Ring[]
  hold: Hold | undefined
  release: Release | undefined
  glow: Glow | undefined
  spark: number // per-frame noise used by the charge arcs
}
const LEFT = logo.left[0]?.length ?? 0 // width of the left logo half
// Full logo lines: left half + GAP spaces + right half.
const FULL = logo.left.map((line, i) => line + " ".repeat(GAP) + logo.right[i])
// Diagonal span of the logo in half-row units; upper bound for ring radii.
const SPAN = Math.hypot(FULL[0]?.length ?? 0, FULL.length * 2) * 0.94
// 8-connected neighbour offsets, clockwise starting from east (y grows down).
const NEAR = [
  [1, 0],
  [1, 1],
  [0, 1],
  [-1, 1],
  [-1, 0],
  [-1, -1],
  [0, -1],
  [1, -1],
] as const
// Position of a cell along its glyph's traced outline path.
type Trace = {
  glyph: number // glyph id the cell belongs to
  i: number // index along the path
  l: number // total path length
}
/** Clamp `n` into the unit interval [0, 1]; NaN passes through unchanged. */
function clamp(n: number) {
  return Math.min(1, Math.max(0, n))
}
/** Linear interpolation from `a` to `b`; the blend factor `t` is clamped to [0, 1]. */
function lerp(a: number, b: number, t: number) {
  const f = clamp(t)
  return a + (b - a) * f
}
/** Smoothstep easing: 0 at t<=0, 1 at t>=1, smooth in between. */
function ease(t: number) {
  const p = clamp(t)
  const sq = p * p
  return sq * (3 - 2 * p)
}
/** Slow-start easing: squares the clamped input before smoothstepping it. */
function push(t: number) {
  const u = clamp(t)
  return ease(u * u)
}
/** Eased progress of `t` through the window [start, end]; a degenerate window acts as a step at `end`. */
function ramp(t: number, start: number, end: number) {
  if (end <= start) {
    return ease(t >= end ? 1 : 0)
  }
  const progress = (t - start) / (end - start)
  return ease(progress)
}
/**
 * Map a brightness value onto a colour ramp:
 * 0..1 blends `base` toward a primary-tinted midpoint (sqrt-shaped),
 * while values above 1 blend that midpoint toward a near-white peak
 * with an exponential approach.
 */
function glow(base: RGBA, theme: ReturnType<typeof useTheme>["theme"], n: number) {
  const warm = tint(base, theme.primary, 0.84)
  const hot = tint(theme.primary, PEAK, 0.96)
  return n <= 1
    ? tint(base, warm, Math.min(1, Math.sqrt(Math.max(0, n)) * 1.14))
    : tint(warm, hot, Math.min(1, 1 - Math.exp(-2.4 * (n - 1))))
}
/**
 * Signed brightness: non-negative values brighten via `glow`,
 * negative values darken toward the theme background (capped at 0.82).
 */
function shade(base: RGBA, theme: ReturnType<typeof useTheme>["theme"], n: number) {
  return n >= 0 ? glow(base, theme, n) : tint(base, theme.background, Math.min(0.82, -n * 0.64))
}
/** Scale non-negative values by `scale`; negative values pass through untouched. */
function ghost(n: number, scale: number) {
  return n < 0 ? n : n * scale
}
/** Deterministic pseudo-random value in [0, 1) from (x, y, t) — classic sin-hash. */
function noise(x: number, y: number, t: number) {
  const raw = Math.sin(x * 12.9898 + y * 78.233 + t * 0.043) * 43758.5453
  return raw - Math.floor(raw)
}
/** True when the character is a visible letter cell (not blank or a shadow marker). */
function lit(char: string) {
  return ![" ", "_", "~"].includes(char)
}
/** Canonical map key for a cell coordinate. */
function key(x: number, y: number) {
  return `${x},${y}`
}
// Orders a connected set of cells into a single drawing path for the tracer.
// Greedy walk: start at the top-left-most cell, repeatedly step to the
// 8-neighbour most aligned with the current direction; on a dead end, jump
// to the nearest remaining cell and reset the heading to east.
function route(list: Array<{ x: number; y: number }>) {
  // Cells not yet consumed by the walk, keyed by "x,y".
  const left = new Map(list.map((item) => [key(item.x, item.y), item]))
  const path: Array<{ x: number; y: number }> = []
  // Start at the smallest y, then smallest x (top-left of the component).
  let cur = [...left.values()].sort((a, b) => a.y - b.y || a.x - b.x)[0]
  let dir = { x: 1, y: 0 }
  while (cur) {
    path.push(cur)
    left.delete(key(cur.x, cur.y))
    if (!left.size) return path
    // Best neighbour: largest dot product with `dir` (keeps the walk
    // going straight), tie-broken by smaller Manhattan offset.
    const next = NEAR.map(([dx, dy]) => left.get(key(cur.x + dx, cur.y + dy)))
      .filter((item): item is { x: number; y: number } => !!item)
      .sort((a, b) => {
        const ax = a.x - cur.x
        const ay = a.y - cur.y
        const bx = b.x - cur.x
        const by = b.y - cur.y
        const adot = ax * dir.x + ay * dir.y
        const bdot = bx * dir.x + by * dir.y
        if (adot !== bdot) return bdot - adot
        return Math.abs(ax) + Math.abs(ay) - (Math.abs(bx) + Math.abs(by))
      })[0]
    if (!next) {
      // Dead end: hop to the closest remaining cell (squared Euclidean
      // distance) and restart the heading.
      cur = [...left.values()].sort((a, b) => {
        const da = (a.x - cur.x) ** 2 + (a.y - cur.y) ** 2
        const db = (b.x - cur.x) ** 2 + (b.y - cur.y) ** 2
        return da - db
      })[0]
      dir = { x: 1, y: 0 }
      continue
    }
    dir = { x: next.x - cur.x, y: next.y - cur.y }
    cur = next
  }
  return path
}
// Builds lookup tables for the rendered logo:
// - glyph: cell "x,y" -> glyph id (connected components of lit cells)
// - trace: cell "x,y" -> position along that glyph's routed outline path
// - center: glyph id -> centroid in half-row coordinates
function mapGlyphs() {
  // Collect every visible (lit) cell of the combined logo.
  const cells = [] as Array<{ x: number; y: number }>
  for (let y = 0; y < FULL.length; y++) {
    for (let x = 0; x < (FULL[y]?.length ?? 0); x++) {
      if (lit(FULL[y]?.[x] ?? " ")) cells.push({ x, y })
    }
  }
  const all = new Map(cells.map((item) => [key(item.x, item.y), item]))
  const seen = new Set<string>()
  const glyph = new Map<string, number>()
  const trace = new Map<string, Trace>()
  const center = new Map<number, { x: number; y: number }>()
  let id = 0
  for (const item of cells) {
    const start = key(item.x, item.y)
    if (seen.has(start)) continue
    // Iterative 8-connected flood fill: one pass collects one glyph.
    const stack = [item]
    const part = [] as Array<{ x: number; y: number }>
    seen.add(start)
    while (stack.length) {
      const cur = stack.pop()!
      part.push(cur)
      glyph.set(key(cur.x, cur.y), id)
      for (const [dx, dy] of NEAR) {
        const next = all.get(key(cur.x + dx, cur.y + dy))
        if (!next) continue
        const mark = key(next.x, next.y)
        if (seen.has(mark)) continue
        seen.add(mark)
        stack.push(next)
      }
    }
    // Order the component's cells into a path for the tracer effect.
    const path = route(part)
    path.forEach((cell, i) => trace.set(key(cell.x, cell.y), { glyph: id, i, l: path.length }))
    // Centroid; y is doubled (+1) to match the half-row coordinate space
    // used by the field math.
    center.set(id, {
      x: part.reduce((sum, item) => sum + item.x, 0) / part.length + 0.5,
      y: (part.reduce((sum, item) => sum + item.y, 0) / part.length) * 2 + 1,
    })
    id++
  }
  return { glyph, trace, center }
}
// Computed once at module load; FULL never changes afterwards.
const MAP = mapGlyphs()
/**
 * Strongest shimmer contribution at cell (x, y) across all live rings.
 * Only cells slightly behind an expanding ring edge shimmer, modulated
 * by a time/position wobble and fading with ring age.
 */
function shimmer(x: number, y: number, frame: Frame) {
  let best = 0
  for (const ring of frame.list) {
    const age = frame.t - ring.at
    if (age < SHIMMER_IN || age > LIFE) continue
    const dx = x + 0.5 - ring.x
    const dy = y * 2 + 1 - ring.y
    const dist = Math.hypot(dx, dy)
    const p = age / LIFE
    // Current ring radius, eased outward over its lifetime.
    const r = SPAN * (1 - (1 - p) ** EXPAND)
    const lag = r - dist
    // Only a thin band just inside the ring edge shimmers.
    if (lag < 0.18 || lag > SHIMMER_OUT) continue
    const band = Math.exp(-(((lag - 1.05) / 0.68) ** 2))
    const wobble = 0.5 + 0.5 * Math.sin(frame.t * 0.035 + x * 0.9 + y * 1.7)
    const n = band * wobble * (1 - p) ** 1.45
    if (n > best) best = n
  }
  return best
}
/**
 * How much of the released charge field survives at cell (x, y) at time `t`:
 * 1 outside the release ring's current radius, fading to 0 just inside the
 * advancing front, and 0 once the release has aged past LIFE.
 */
function remain(x: number, y: number, item: Release, t: number) {
  const age = t - item.at
  if (age < 0 || age > LIFE) return 0
  const p = age / LIFE
  const dx = x + 0.5 - item.x - 0.5
  const dy = y * 2 + 1 - item.y * 2 - 1
  const dist = Math.hypot(dx, dy)
  // Radius of the collapse front, eased outward over the release lifetime.
  const reach = SPAN * (1 - (1 - p) ** EXPAND)
  if (dist > reach) return 1
  const depth = (reach - dist) / 1.35
  return clamp(depth < 1 ? 1 - depth : 0)
}
// Sum of all active ripple contributions at cell (x, y).
// Each Ring produces an expanding bright edge plus secondary terms;
// `live` marks cells containing a visible letter (they get a wake).
function wave(x: number, y: number, frame: Frame, live: boolean) {
  return frame.list.reduce((sum, item) => {
    const age = frame.t - item.at
    if (age < 0 || age > LIFE) return sum
    const p = age / LIFE
    // Cell centre in half-row coordinates (rows count double vertically).
    const dx = x + 0.5 - item.x
    const dy = y * 2 + 1 - item.y
    const dist = Math.hypot(dx, dy)
    // Ring radius eases outward over the ring's lifetime.
    const r = SPAN * (1 - (1 - p) ** EXPAND)
    const fade = (1 - p) ** 1.32
    // Per-cell jitter so the ring edge is not perfectly circular.
    const j = 1.02 + noise(x + item.x * 0.7, y + item.y * 0.7, item.at * 0.002 + age * 0.06) * 0.52
    const edge = Math.exp(-(((dist - r) / WIDTH) ** 2)) * GAIN * fade * item.force * j
    const swell = Math.exp(-(((dist - Math.max(0, r - DRIFT)) / WIDE) ** 2)) * SWELL * fade * item.force
    const trail = dist < r ? Math.exp(-(r - dist) / 2.4) * TRAIL * fade * item.force * lerp(0.92, 1.22, j) : 0
    // Bright pop at the impact point during the first ~140ms.
    const flash = Math.exp(-(dist * dist) / 3.2) * FLASH * item.force * Math.max(0, 1 - age / 140) * lerp(0.95, 1.18, j)
    // Negative terms: a brief dip at and just around the centre after impact.
    const kick = Math.exp(-(dist * dist) / 2) * item.kick * Math.max(0, 1 - age / 100)
    const suck = Math.exp(-(((dist - 1.25) / 0.75) ** 2)) * item.kick * SUCK * Math.max(0, 1 - age / 110)
    const wake = live && dist < r ? Math.exp(-(r - dist) / 1.25) * 0.32 * fade : 0
    return sum + edge + swell + trail + flash + wake - kick - suck
  }, 0)
}
// Brightness contribution of the charge effect (active hold, or its release)
// at cell (x, y): a growing core, shell, embers, rotating arcs, sparks, and
// a dimming term, all driven by charge progress.
function field(x: number, y: number, frame: Frame) {
  const held = frame.hold
  const rest = frame.release
  const item = held ?? rest
  if (!item) return 0
  // Charge progress: live ramp while held, values frozen at release after.
  const rise = held ? ramp(frame.t - held.at, HOLD, CHARGE) : rest!.rise
  const level = held ? push(rise) : rest!.level
  const body = rise
  const storm = level * level
  const sink = held ? ramp(frame.t - held.at, SINK, CHARGE) : rest!.rise
  const dx = x + 0.5 - item.x - 0.5
  const dy = y * 2 + 1 - item.y * 2 - 1
  const dist = Math.hypot(dx, dy)
  const angle = Math.atan2(dy, dx)
  // Arc rotation speeds up as the charge builds.
  const spin = frame.t * lerp(0.008, 0.018, storm)
  // Surrounding dim grows with `sink` and breathes slightly over time.
  const dim = lerp(0, DIM, sink) * lerp(0.99, 1.01, 0.5 + 0.5 * Math.sin(frame.t * 0.014))
  const core = Math.exp(-(dist * dist) / Math.max(0.22, lerp(0.22, 3.2, body))) * lerp(0.42, 2.45, body)
  const shell =
    Math.exp(-(((dist - lerp(0.16, 2.05, body)) / Math.max(0.18, lerp(0.18, 0.82, body))) ** 2)) * lerp(0.1, 0.95, body)
  const ember =
    Math.exp(-(((dist - lerp(0.45, 2.65, body)) / Math.max(0.14, lerp(0.14, 0.62, body))) ** 2)) *
    lerp(0.02, 0.78, body)
  // Three-fold rotating arc and five-fold seam patterns around the centre.
  const arc = Math.max(0, Math.cos(angle * 3 - spin + frame.spark * 2.2)) ** 8
  const seam = Math.max(0, Math.cos(angle * 5 + spin * 1.55)) ** 12
  const ring = Math.exp(-(((dist - lerp(1.05, 3, level)) / 0.48) ** 2)) * arc * lerp(0.03, 0.5 + ARC, storm)
  const fork = Math.exp(-(((dist - (1.55 + storm * 2.1)) / 0.36) ** 2)) * seam * storm * FORK
  // Random sparks that only appear at high charge.
  const spark = Math.max(0, noise(x, y, frame.t) - lerp(0.94, 0.66, storm)) * lerp(0, 5.4, storm)
  const glitch = spark * Math.exp(-dist / Math.max(1.2, 3.1 - storm))
  const crack = Math.max(0, Math.cos((dx - dy) * 1.6 + spin * 2.1)) ** 18
  const lash = crack * Math.exp(-(((dist - (1.95 + storm * 2)) / 0.28) ** 2)) * storm * 1.1
  const flicker =
    Math.max(0, noise(item.x * 3.1, item.y * 2.7, frame.t * 1.7) - 0.72) *
    Math.exp(-(dist * dist) / 0.15) *
    lerp(0.08, 0.42, body)
  // After release the whole field collapses outward with the ring front.
  const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1
  return (core + shell + ember + ring + fork + glitch + lash + flicker - dim) * fade
}
/**
 * Soft highlight around the pressed (or just-released) point at cell (x, y):
 * a gaussian falloff scaled by charge progress, collapsing with the release
 * front once the hold ends.
 */
function pick(x: number, y: number, frame: Frame) {
  const grip = frame.hold
  const drop = frame.release
  const focus = grip ?? drop
  if (!focus) return 0
  // Live charge while held; frozen value after release.
  const rise = grip ? ramp(frame.t - grip.at, HOLD, CHARGE) : drop!.rise
  const fade = drop && !grip ? remain(x, y, drop, frame.t) : 1
  const dx = x + 0.5 - focus.x - 0.5
  const dy = y * 2 + 1 - focus.y * 2 - 1
  const dist = Math.hypot(dx, dy)
  return Math.exp(-(dist * dist) / 1.7) * lerp(0.2, 0.96, rise) * fade
}
function select(x: number, y: number) {
const direct = MAP.glyph.get(key(x, y))
if (direct !== undefined) return direct
const near = NEAR.map(([dx, dy]) => MAP.glyph.get(key(x + dx, y + dy))).find(
(item): item is number => item !== undefined,
)
return near
}
// Brightness of the outline tracer at cell (x, y): a bright head sweeping
// around the active glyph's routed path, with a soft glow and trailing tail.
function trace(x: number, y: number, frame: Frame) {
  const held = frame.hold
  const rest = frame.release
  const item = held ?? rest
  if (!item || item.glyph === undefined) return 0
  const step = MAP.trace.get(key(x, y))
  // Only cells on the active glyph's path participate; paths shorter than
  // 2 steps cannot be traced meaningfully.
  if (!step || step.glyph !== item.glyph || step.l < 2) return 0
  const age = frame.t - item.at
  const rise = held ? ramp(age, HOLD, CHARGE) : rest!.rise
  // The tracer fades in over TRACE_IN while held; a release shows it fully.
  const appear = held ? ramp(age, 0, TRACE_IN) : 1
  // Sweep speed grows with charge.
  const speed = lerp(TRACE * 0.48, TRACE * 0.88, rise)
  const head = (age * speed) % step.l
  // Circular distance along the closed path from this cell to the head.
  const dist = Math.min(Math.abs(step.i - head), step.l - Math.abs(step.i - head))
  const tail = (head - TAIL + step.l) % step.l
  const lag = Math.min(Math.abs(step.i - tail), step.l - Math.abs(step.i - tail))
  const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1
  const core = Math.exp(-((dist / 1.05) ** 2)) * lerp(0.8, 2.35, rise)
  const glow = Math.exp(-((dist / 1.85) ** 2)) * lerp(0.08, 0.34, rise)
  const trail = Math.exp(-((lag / 1.45) ** 2)) * lerp(0.04, 0.42, rise)
  return (core + glow + trail) * appear * fade
}
/**
 * Afterglow on a whole glyph after its press is released: flashes at the
 * release force and decays quadratically over GLOW_OUT, slightly brighter
 * toward the glyph's centroid.
 */
function bloom(x: number, y: number, frame: Frame) {
  const pulse = frame.glow
  if (!pulse) return 0
  // Only cells belonging to the glowing glyph participate.
  if (MAP.glyph.get(key(x, y)) !== pulse.glyph) return 0
  const age = frame.t - pulse.at
  if (age < 0 || age > GLOW_OUT) return 0
  const p = age / GLOW_OUT
  const center = MAP.center.get(pulse.glyph)!
  const dx = x + 0.5 - center.x
  const dy = y * 2 + 1 - center.y
  const bias = Math.exp(-((Math.hypot(dx, dy) / 2.8) ** 2))
  const flash = (1 - p) ** 2
  return lerp(pulse.force, pulse.force * 0.18, p) * lerp(0.72, 1.1, bias) * flash
}
// Character class matching the shadow-marker glyphs exported by the logo.
// NOTE(review): assumes `marks` contains no characters that are special
// inside a regex character class (e.g. "]", "\\", "^" in first position,
// or a "-" forming a range) — confirm against the `marks` definition.
const SHADOW_MARKER = new RegExp(`[${marks}]`)
export function Logo() {
const { theme } = useTheme()
const [rings, setRings] = createSignal<Ring[]>([])
const [hold, setHold] = createSignal<Hold>()
const [release, setRelease] = createSignal<Release>()
const [glow, setGlow] = createSignal<Glow>()
const [now, setNow] = createSignal(0)
let box: BoxRenderable | undefined
let timer: ReturnType<typeof setInterval> | undefined
let hum = false
const stop = () => {
if (!timer) return
clearInterval(timer)
timer = undefined
}
const tick = () => {
const t = performance.now()
setNow(t)
const item = hold()
if (item && !hum && t - item.at >= HOLD) {
hum = true
Sound.start()
}
if (item && t - item.at >= CHARGE) {
burst(item.x, item.y)
}
let live = false
setRings((list) => {
const next = list.filter((item) => t - item.at < LIFE)
live = next.length > 0
return next
})
const flash = glow()
if (flash && t - flash.at >= GLOW_OUT) {
setGlow(undefined)
}
if (!live) setRelease(undefined)
if (live || hold() || release() || glow()) return
stop()
}
const start = () => {
if (timer) return
timer = setInterval(tick, 16)
}
const hit = (x: number, y: number) => {
const char = FULL[y]?.[x]
return char !== undefined && char !== " "
}
const press = (x: number, y: number, t: number) => {
const last = hold()
if (last) burst(last.x, last.y)
setNow(t)
if (!last) setRelease(undefined)
setHold({ x, y, at: t, glyph: select(x, y) })
hum = false
start()
}
const burst = (x: number, y: number) => {
const item = hold()
if (!item) return
hum = false
const t = performance.now()
const age = t - item.at
const rise = ramp(age, HOLD, CHARGE)
const level = push(rise)
setHold(undefined)
setRelease({ x, y, at: t, glyph: item.glyph, level, rise })
if (item.glyph !== undefined) {
setGlow({ glyph: item.glyph, at: t, force: lerp(0.18, 1.5, rise * level) })
}
setRings((list) => [
...list,
{
x: x + 0.5,
y: y * 2 + 1,
at: t,
force: lerp(0.82, 2.55, level),
kick: lerp(0.32, 0.32 + KICK, level),
},
])
setNow(t)
start()
Sound.pulse(lerp(0.8, 1, level))
}
const frame = createMemo(() => {
const t = now()
const item = hold()
return {
t,
list: rings(),
hold: item,
release: release(),
glow: glow(),
spark: item ? noise(item.x, item.y, t) : 0,
}
})
const dusk = createMemo(() => {
const base = frame()
const t = base.t - LAG
const item = base.hold
return {
t,
list: base.list,
hold: item,
release: base.release,
glow: base.glow,
spark: item ? noise(item.x, item.y, t) : 0,
}
})
const renderLine = (
line: string,
y: number,
ink: RGBA,
bold: boolean,
off: number,
frame: Frame,
dusk: Frame,
): JSX.Element[] => {
const shadow = tint(theme.background, ink, 0.25)
const renderLine = (line: string, fg: RGBA, bold: boolean): JSX.Element[] => {
const shadow = tint(theme.background, fg, 0.25)
const attrs = bold ? TextAttributes.BOLD : undefined
const elements: JSX.Element[] = []
let i = 0
return [...line].map((char, i) => {
const h = field(off + i, y, frame)
const n = wave(off + i, y, frame, lit(char)) + h
const s = wave(off + i, y, dusk, false) + h
const p = lit(char) ? pick(off + i, y, frame) : 0
const e = lit(char) ? trace(off + i, y, frame) : 0
const b = lit(char) ? bloom(off + i, y, frame) : 0
const q = shimmer(off + i, y, frame)
while (i < line.length) {
const rest = line.slice(i)
const markerIndex = rest.search(SHADOW_MARKER)
if (char === "_") {
return (
<text
fg={shade(ink, theme, s * 0.08)}
bg={shade(shadow, theme, ghost(s, 0.24) + ghost(q, 0.06))}
attributes={attrs}
selectable={false}
>
{" "}
</text>
if (markerIndex === -1) {
elements.push(
<text fg={fg} attributes={attrs} selectable={false}>
{rest}
</text>,
)
break
}
if (markerIndex > 0) {
elements.push(
<text fg={fg} attributes={attrs} selectable={false}>
{rest.slice(0, markerIndex)}
</text>,
)
}
if (char === "^") {
return (
<text
fg={shade(ink, theme, n + p + e + b)}
bg={shade(shadow, theme, ghost(s, 0.18) + ghost(q, 0.05) + ghost(b, 0.08))}
attributes={attrs}
selectable={false}
>
</text>
)
const marker = rest[markerIndex]
switch (marker) {
case "_":
elements.push(
<text fg={fg} bg={shadow} attributes={attrs} selectable={false}>
{" "}
</text>,
)
break
case "^":
elements.push(
<text fg={fg} bg={shadow} attributes={attrs} selectable={false}>
</text>,
)
break
case "~":
elements.push(
<text fg={shadow} attributes={attrs} selectable={false}>
</text>,
)
break
}
if (char === "~") {
return (
<text fg={shade(shadow, theme, ghost(s, 0.22) + ghost(q, 0.05))} attributes={attrs} selectable={false}>
</text>
)
}
if (char === " ") {
return (
<text fg={ink} attributes={attrs} selectable={false}>
{char}
</text>
)
}
return (
<text fg={shade(ink, theme, n + p + e + b)} attributes={attrs} selectable={false}>
{char}
</text>
)
})
}
onCleanup(() => {
stop()
hum = false
Sound.dispose()
})
const mouse = (evt: MouseEvent) => {
if (!box) return
if ((evt.type === "down" || evt.type === "drag") && evt.button === MouseButton.LEFT) {
const x = evt.x - box.x
const y = evt.y - box.y
if (!hit(x, y)) return
if (evt.type === "drag" && hold()) return
evt.preventDefault()
evt.stopPropagation()
const t = performance.now()
press(x, y, t)
return
i += markerIndex + 1
}
if (!hold()) return
if (evt.type === "up") {
const item = hold()
if (!item) return
burst(item.x, item.y)
}
return elements
}
return (
<box ref={(item: BoxRenderable) => (box = item)}>
<box
position="absolute"
top={0}
left={0}
width={FULL[0]?.length ?? 0}
height={FULL.length}
zIndex={1}
onMouse={mouse}
/>
<box>
<For each={logo.left}>
{(line, index) => (
<box flexDirection="row" gap={1}>
<box flexDirection="row">{renderLine(line, index(), theme.textMuted, false, 0, frame(), dusk())}</box>
<box flexDirection="row">
{renderLine(logo.right[index()], index(), theme.text, true, LEFT + GAP, frame(), dusk())}
</box>
<box flexDirection="row">{renderLine(line, theme.textMuted, false)}</box>
<box flexDirection="row">{renderLine(logo.right[index()], theme.text, true)}</box>
</box>
)}
</For>

View File

@@ -2195,7 +2195,7 @@ function Question(props: ToolProps<typeof QuestionTool>) {
const { theme } = useTheme()
const count = createMemo(() => props.input.questions?.length ?? 0)
function format(answer?: ReadonlyArray<string>) {
function format(answer?: string[]) {
if (!answer?.length) return "(no answer)"
return answer.join(", ")
}

View File

@@ -1,156 +0,0 @@
import { Player } from "cli-sound"
import { mkdirSync } from "node:fs"
import { tmpdir } from "node:os"
import { basename, join } from "node:path"
import { Process } from "@/util/process"
import { which } from "@/util/which"
import pulseA from "../asset/pulse-a.wav" with { type: "file" }
import pulseB from "../asset/pulse-b.wav" with { type: "file" }
import pulseC from "../asset/pulse-c.wav" with { type: "file" }
import charge from "../asset/charge.wav" with { type: "file" }
// Bundled samples: three pulse variants (cycled per hit) plus a charge hum.
const FILE = [pulseA, pulseB, pulseC]
const HUM = charge
// Assets are copied out here so external player binaries can read them by path.
const DIR = join(tmpdir(), "opencode-sfx")
// Candidate CLI audio players, probed in this order; the first one found on
// PATH is used (see Sound.pick below).
const LIST = [
"ffplay",
"mpv",
"mpg123",
"mpg321",
"mplayer",
"afplay",
"play",
"omxplayer",
"aplay",
"cmdmp3",
"cvlc",
"powershell.exe",
] as const
type Kind = (typeof LIST)[number]
// Build the argv for a given player binary, mapping our fractional volume
// onto each player's own volume-flag convention (fraction, percent, or none).
function args(kind: Kind, file: string, volume: number) {
  const percent = String(Math.round(volume * 100))
  switch (kind) {
    case "ffplay":
      return [kind, "-autoexit", "-nodisp", "-af", `volume=${volume}`, file]
    case "mpv":
      return [kind, "--no-video", "--audio-display=no", "--volume", percent, file]
    case "mpg123":
    case "mpg321":
      return [kind, "-g", percent, file]
    case "mplayer":
      return [kind, "-vo", "null", "-volume", percent, file]
    case "afplay":
    case "omxplayer":
    case "aplay":
    case "cmdmp3":
      return [kind, file]
    case "play":
      return [kind, "-v", String(volume), file]
    case "cvlc":
      return [kind, `--gain=${volume}`, "--play-and-exit", file]
    default:
      // powershell.exe: inline script; single quotes in the path are doubled
      // to stay inside the single-quoted PowerShell string literal.
      return [kind, "-c", `(New-Object Media.SoundPlayer '${file.replace(/'/g, "''")}').PlaySync()`]
  }
}
// Best-effort sound-effect playback: prefer the cli-sound Player, falling
// back to spawning whichever CLI audio player is found on PATH. Every
// failure path is swallowed — audio must never break the app.
export namespace Sound {
// Lazily constructed cli-sound Player; null records a failed construction.
let item: Player | null | undefined
// First CLI player found on PATH; null means none available.
let kind: Kind | null | undefined
// Currently running hum process, if any.
let proc: Process.Child | undefined
// Timer handle for a delayed stop (see stop(delay)).
let tail: ReturnType<typeof setTimeout> | undefined
// Memoized promise that copies bundled assets into the temp dir.
let cache: Promise<{ hum: string; pulse: string[] }> | undefined
// Generation counter; bumping it invalidates in-flight start() callbacks.
let seq = 0
// Round-robin cursor over the pulse samples.
let shot = 0
// Construct the Player once, caching failure as null so we never retry.
function load() {
if (item !== undefined) return item
try {
item = new Player({ volume: 0.35 })
} catch {
item = null
}
return item
}
// Copy a bundled asset into DIR (skipping if already present) and return
// the on-disk path external players can read.
async function file(path: string) {
mkdirSync(DIR, { recursive: true })
const next = join(DIR, basename(path))
const out = Bun.file(next)
if (await out.exists()) return next
await Bun.write(out, Bun.file(path))
return next
}
// Materialize all assets exactly once; later calls reuse the same promise.
function asset() {
cache ??= Promise.all([file(HUM), Promise.all(FILE.map(file))]).then(([hum, pulse]) => ({ hum, pulse }))
return cache
}
// Probe PATH for the first known CLI player; cache the result (or null).
function pick() {
if (kind !== undefined) return kind
kind = LIST.find((item) => which(item)) ?? null
return kind
}
// Spawn the chosen CLI player fully detached from our stdio; returns
// undefined when no player binary is available.
function run(file: string, volume: number) {
const kind = pick()
if (!kind) return
return Process.spawn(args(kind, file, volume), {
stdin: "ignore",
stdout: "ignore",
stderr: "ignore",
})
}
// Cancel any pending delayed stop.
function clear() {
if (!tail) return
clearTimeout(tail)
tail = undefined
}
// Play via the Player when available; otherwise (or when the Player's
// playback rejects) fall back to spawning a CLI player.
function play(file: string, volume: number) {
const item = load()
if (!item) return run(file, volume)?.exited
return item.play(file, { volume }).catch(() => run(file, volume)?.exited)
}
// Start the hum. Any previous hum is stopped first; the generation id
// guards the async asset callback against a newer start()/stop() racing it.
export function start() {
stop()
const id = ++seq
void asset().then(({ hum }) => {
if (id !== seq) return
const next = run(hum, 0.24)
if (!next) return
proc = next
// Drop our handle when the process exits (resolve or reject), but only
// if no newer generation has replaced it.
void next.exited.then(
() => {
if (id !== seq) return
if (proc === next) proc = undefined
},
() => {
if (id !== seq) return
if (proc === next) proc = undefined
},
)
})
}
// Stop the hum, immediately (delay <= 0) or after `delay` ms. Always bumps
// the generation so any pending start() callback becomes a no-op.
export function stop(delay = 0) {
seq++
clear()
if (!proc) return
const next = proc
if (delay <= 0) {
proc = undefined
void Process.stop(next).catch(() => undefined)
return
}
tail = setTimeout(() => {
tail = undefined
if (proc === next) proc = undefined
void Process.stop(next).catch(() => undefined)
}, delay)
}
// Fire a one-shot pulse sample (round-robin over FILE), stopping the hum
// after a short 140ms delay; `scale` nudges the playback volume up.
export function pulse(scale = 1) {
stop(140)
const index = shot++ % FILE.length
void asset()
.then(({ pulse }) => play(pulse[index], 0.26 + 0.14 * scale))
.catch(() => undefined)
}
// Release everything on teardown.
export function dispose() {
stop()
}
}

View File

@@ -330,7 +330,6 @@ export namespace Ripgrep {
glob?: string[]
limit?: number
follow?: boolean
file?: string[]
}) => Effect.Effect<{ items: Item[]; partial: boolean }, PlatformError | Error>
}
@@ -352,7 +351,6 @@ export namespace Ripgrep {
maxDepth?: number
limit?: number
pattern?: string
file?: string[]
}) {
const out = [yield* bin(), input.mode === "search" ? "--json" : "--files", "--glob=!.git/*"]
if (input.follow) out.push("--follow")
@@ -365,7 +363,7 @@ export namespace Ripgrep {
}
if (input.limit) out.push(`--max-count=${input.limit}`)
if (input.mode === "search") out.push("--no-messages")
if (input.pattern) out.push("--", input.pattern, ...(input.file ?? []))
if (input.pattern) out.push("--", input.pattern)
return out
})
@@ -407,7 +405,6 @@ export namespace Ripgrep {
glob?: string[]
limit?: number
follow?: boolean
file?: string[]
}) {
return yield* Effect.scoped(
Effect.gen(function* () {
@@ -417,7 +414,6 @@ export namespace Ripgrep {
follow: input.follow,
limit: input.limit,
pattern: input.pattern,
file: input.file,
})
const handle = yield* spawner.spawn(

View File

@@ -47,7 +47,6 @@ export namespace Flag {
export declare const OPENCODE_CLIENT: string
export const OPENCODE_SERVER_PASSWORD = process.env["OPENCODE_SERVER_PASSWORD"]
export const OPENCODE_SERVER_USERNAME = process.env["OPENCODE_SERVER_USERNAME"]
export const OPENCODE_EXPERIMENTAL_HTTPAPI_PORT = number("OPENCODE_EXPERIMENTAL_HTTPAPI_PORT")
export const OPENCODE_ENABLE_QUESTION_TOOL = truthy("OPENCODE_ENABLE_QUESTION_TOOL")
// Experimental

View File

@@ -3,6 +3,7 @@ import { randomBytes } from "crypto"
export namespace Identifier {
const prefixes = {
job: "job",
event: "evt",
session: "ses",
message: "msg",

View File

@@ -2,6 +2,7 @@ import { Bus } from "@/bus"
import { BusEvent } from "@/bus/bus-event"
import { Config } from "@/config/config"
import { InstanceState } from "@/effect/instance-state"
import { makeRuntime } from "@/effect/run-service"
import { ProjectID } from "@/project/schema"
import { Instance } from "@/project/instance"
import { MessageID, SessionID } from "@/session/schema"
@@ -307,4 +308,18 @@ export namespace Permission {
}
export const defaultLayer = layer.pipe(Layer.provide(Bus.layer))
export const { runPromise } = makeRuntime(Service, defaultLayer)
export async function ask(input: z.infer<typeof AskInput>) {
return runPromise((s) => s.ask(input))
}
export async function reply(input: z.infer<typeof ReplyInput>) {
return runPromise((s) => s.reply(input))
}
export async function list() {
return runPromise((s) => s.list())
}
}

View File

@@ -3,9 +3,8 @@ import { Bus } from "@/bus"
import { BusEvent } from "@/bus/bus-event"
import { InstanceState } from "@/effect/instance-state"
import { SessionID, MessageID } from "@/session/schema"
import { zod } from "@/util/effect-zod"
import { Log } from "@/util/log"
import { withStatics } from "@/util/schema"
import z from "zod"
import { QuestionID } from "./schema"
export namespace Question {
@@ -13,91 +12,67 @@ export namespace Question {
// Schemas
export class Option extends Schema.Class<Option>("QuestionOption")({
label: Schema.String.annotate({
description: "Display text (1-5 words, concise)",
}),
description: Schema.String.annotate({
description: "Explanation of choice",
}),
}) {
static readonly zod = zod(this)
}
export const Option = z
.object({
label: z.string().describe("Display text (1-5 words, concise)"),
description: z.string().describe("Explanation of choice"),
})
.meta({ ref: "QuestionOption" })
export type Option = z.infer<typeof Option>
const base = {
question: Schema.String.annotate({
description: "Complete question",
}),
header: Schema.String.annotate({
description: "Very short label (max 30 chars)",
}),
options: Schema.Array(Option).annotate({
description: "Available choices",
}),
multiple: Schema.optional(Schema.Boolean).annotate({
description: "Allow selecting multiple choices",
}),
}
export const Info = z
.object({
question: z.string().describe("Complete question"),
header: z.string().describe("Very short label (max 30 chars)"),
options: z.array(Option).describe("Available choices"),
multiple: z.boolean().optional().describe("Allow selecting multiple choices"),
custom: z.boolean().optional().describe("Allow typing a custom answer (default: true)"),
})
.meta({ ref: "QuestionInfo" })
export type Info = z.infer<typeof Info>
export class Info extends Schema.Class<Info>("QuestionInfo")({
...base,
custom: Schema.optional(Schema.Boolean).annotate({
description: "Allow typing a custom answer (default: true)",
}),
}) {
static readonly zod = zod(this)
}
export const Request = z
.object({
id: QuestionID.zod,
sessionID: SessionID.zod,
questions: z.array(Info).describe("Questions to ask"),
tool: z
.object({
messageID: MessageID.zod,
callID: z.string(),
})
.optional(),
})
.meta({ ref: "QuestionRequest" })
export type Request = z.infer<typeof Request>
export class Prompt extends Schema.Class<Prompt>("QuestionPrompt")(base) {
static readonly zod = zod(this)
}
export const Answer = z.array(z.string()).meta({ ref: "QuestionAnswer" })
export type Answer = z.infer<typeof Answer>
export class Tool extends Schema.Class<Tool>("QuestionTool")({
messageID: MessageID,
callID: Schema.String,
}) {
static readonly zod = zod(this)
}
export class Request extends Schema.Class<Request>("QuestionRequest")({
id: QuestionID,
sessionID: SessionID,
questions: Schema.Array(Info).annotate({
description: "Questions to ask",
}),
tool: Schema.optional(Tool),
}) {
static readonly zod = zod(this)
}
export const Answer = Schema.Array(Schema.String)
.annotate({ identifier: "QuestionAnswer" })
.pipe(withStatics((s) => ({ zod: zod(s) })))
export type Answer = Schema.Schema.Type<typeof Answer>
export class Reply extends Schema.Class<Reply>("QuestionReply")({
answers: Schema.Array(Answer).annotate({
description: "User answers in order of questions (each answer is an array of selected labels)",
}),
}) {
static readonly zod = zod(this)
}
class Replied extends Schema.Class<Replied>("QuestionReplied")({
sessionID: SessionID,
requestID: QuestionID,
answers: Schema.Array(Answer),
}) {}
class Rejected extends Schema.Class<Rejected>("QuestionRejected")({
sessionID: SessionID,
requestID: QuestionID,
}) {}
export const Reply = z.object({
answers: z
.array(Answer)
.describe("User answers in order of questions (each answer is an array of selected labels)"),
})
export type Reply = z.infer<typeof Reply>
export const Event = {
Asked: BusEvent.define("question.asked", Request.zod),
Replied: BusEvent.define("question.replied", zod(Replied)),
Rejected: BusEvent.define("question.rejected", zod(Rejected)),
Asked: BusEvent.define("question.asked", Request),
Replied: BusEvent.define(
"question.replied",
z.object({
sessionID: SessionID.zod,
requestID: QuestionID.zod,
answers: z.array(Answer),
}),
),
Rejected: BusEvent.define(
"question.rejected",
z.object({
sessionID: SessionID.zod,
requestID: QuestionID.zod,
}),
),
}
export class RejectedError extends Schema.TaggedErrorClass<RejectedError>()("QuestionRejectedError", {}) {
@@ -108,7 +83,7 @@ export namespace Question {
interface PendingEntry {
info: Request
deferred: Deferred.Deferred<ReadonlyArray<Answer>, RejectedError>
deferred: Deferred.Deferred<Answer[], RejectedError>
}
interface State {
@@ -120,12 +95,12 @@ export namespace Question {
export interface Interface {
readonly ask: (input: {
sessionID: SessionID
questions: ReadonlyArray<Info>
tool?: Tool
}) => Effect.Effect<ReadonlyArray<Answer>, RejectedError>
readonly reply: (input: { requestID: QuestionID; answers: ReadonlyArray<Answer> }) => Effect.Effect<void>
questions: Info[]
tool?: { messageID: MessageID; callID: string }
}) => Effect.Effect<Answer[], RejectedError>
readonly reply: (input: { requestID: QuestionID; answers: Answer[] }) => Effect.Effect<void>
readonly reject: (requestID: QuestionID) => Effect.Effect<void>
readonly list: () => Effect.Effect<ReadonlyArray<Request>>
readonly list: () => Effect.Effect<Request[]>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/Question") {}
@@ -155,20 +130,20 @@ export namespace Question {
const ask = Effect.fn("Question.ask")(function* (input: {
sessionID: SessionID
questions: ReadonlyArray<Info>
tool?: Tool
questions: Info[]
tool?: { messageID: MessageID; callID: string }
}) {
const pending = (yield* InstanceState.get(state)).pending
const id = QuestionID.ascending()
log.info("asking", { id, questions: input.questions.length })
const deferred = yield* Deferred.make<ReadonlyArray<Answer>, RejectedError>()
const info = Schema.decodeUnknownSync(Request)({
const deferred = yield* Deferred.make<Answer[], RejectedError>()
const info: Request = {
id,
sessionID: input.sessionID,
questions: input.questions,
tool: input.tool,
})
}
pending.set(id, { info, deferred })
yield* bus.publish(Event.Asked, info)
@@ -180,10 +155,7 @@ export namespace Question {
)
})
const reply = Effect.fn("Question.reply")(function* (input: {
requestID: QuestionID
answers: ReadonlyArray<Answer>
}) {
const reply = Effect.fn("Question.reply")(function* (input: { requestID: QuestionID; answers: Answer[] }) {
const pending = (yield* InstanceState.get(state)).pending
const existing = pending.get(input.requestID)
if (!existing) {

View File

@@ -18,7 +18,6 @@ import { lazy } from "../../util/lazy"
import { Effect, Option } from "effect"
import { WorkspaceRoutes } from "./workspace"
import { Agent } from "@/agent/agent"
import { HttpApiRoutes } from "./httpapi"
const ConsoleOrgOption = z.object({
accountID: z.string(),
@@ -40,7 +39,6 @@ const ConsoleSwitchBody = z.object({
export const ExperimentalRoutes = lazy(() =>
new Hono()
.route("/httpapi", HttpApiRoutes())
.get(
"/console",
describeRoute({

View File

@@ -1,7 +0,0 @@
import { lazy } from "@/util/lazy"
import { Hono } from "hono"
import { QuestionHttpApiHandler } from "./question"
// Experimental HttpApi mount: every /question request — exact path or any
// nested path — is delegated to the effect-based question handler.
export const HttpApiRoutes = lazy(() => {
  const routes = new Hono()
  return routes.all("/question", QuestionHttpApiHandler).all("/question/*", QuestionHttpApiHandler)
})

View File

@@ -1,94 +0,0 @@
import { AppLayer } from "@/effect/app-runtime"
import { memoMap } from "@/effect/run-service"
import { Question } from "@/question"
import { QuestionID } from "@/question/schema"
import { lazy } from "@/util/lazy"
import { Effect, Layer, Schema } from "effect"
import { HttpRouter, HttpServer } from "effect/unstable/http"
import { HttpApi, HttpApiBuilder, HttpApiEndpoint, HttpApiGroup, OpenApi } from "effect/unstable/httpapi"
import type { Handler } from "hono"
// Base path for the experimental question HttpApi.
const root = "/experimental/httpapi/question"
// Declarative HttpApi spec: GET to list pending questions, POST to reply to
// one. The OpenApi annotations feed the generated spec served at `${root}/doc`.
export const QuestionApi = HttpApi.make("question")
.add(
HttpApiGroup.make("question")
.add(
HttpApiEndpoint.get("list", root, {
success: Schema.Array(Question.Request),
}).annotateMerge(
OpenApi.annotations({
identifier: "question.list",
summary: "List pending questions",
description: "Get all pending question requests across all sessions.",
}),
),
// Reply endpoint: request id in the path, answers in the payload.
HttpApiEndpoint.post("reply", `${root}/:requestID/reply`, {
params: { requestID: QuestionID },
payload: Question.Reply,
success: Schema.Boolean,
}).annotateMerge(
OpenApi.annotations({
identifier: "question.reply",
summary: "Reply to question request",
description: "Provide answers to a question request from the AI assistant.",
}),
),
)
.annotateMerge(
OpenApi.annotations({
title: "question",
description: "Experimental HttpApi question routes.",
}),
),
)
.annotateMerge(
OpenApi.annotations({
title: "opencode experimental HttpApi",
version: "0.0.1",
description: "Experimental HttpApi surface for selected instance routes.",
}),
)
// Live handlers for the QuestionApi "question" group, backed by the
// Question service (provided via Question.defaultLayer).
export const QuestionLive = HttpApiBuilder.group(
QuestionApi,
"question",
Effect.fn("QuestionHttpApi.handlers")(function* (handlers) {
const svc = yield* Question.Service
// GET: surface all pending question requests.
const list = Effect.fn("QuestionHttpApi.list")(function* () {
return yield* svc.list()
})
// POST: forward the answers to the service; responds `true` on success.
const reply = Effect.fn("QuestionHttpApi.reply")(function* (ctx: {
params: { requestID: QuestionID }
payload: Question.Reply
}) {
yield* svc.reply({
requestID: ctx.params.requestID,
answers: ctx.payload.answers,
})
return true
})
return handlers.handle("list", list).handle("reply", reply)
}),
).pipe(Layer.provide(Question.defaultLayer))
// Lazily build a web-standard handler for the HttpApi. The shared memoMap
// lets AppLayer services be reused with the main runtime instead of being
// rebuilt per handler.
const web = lazy(() =>
HttpRouter.toWebHandler(
Layer.mergeAll(
AppLayer,
HttpApiBuilder.layer(QuestionApi, { openapiPath: `${root}/doc` }).pipe(
Layer.provide(QuestionLive),
Layer.provide(HttpServer.layerServices),
),
),
{
disableLogger: true,
memoMap,
},
),
)
// Hono adapter: forward the raw Request into the effect web handler.
export const QuestionHttpApiHandler: Handler = (c, _next) => web().handler(c.req.raw)

View File

@@ -1,104 +0,0 @@
import { NodeHttpServer } from "@effect/platform-node"
import { Context, Effect, Exit, Layer, Scope } from "effect"
import { HttpApiBuilder } from "effect/unstable/httpapi"
import { HttpRouter, HttpServer, HttpServerRequest, HttpServerResponse } from "effect/unstable/http"
import { createServer } from "node:http"
import { AppRuntime } from "@/effect/app-runtime"
import { InstanceRef, WorkspaceRef } from "@/effect/instance-ref"
import { memoMap } from "@/effect/run-service"
import { Flag } from "@/flag/flag"
import { InstanceBootstrap } from "@/project/bootstrap"
import { Instance } from "@/project/instance"
import { Filesystem } from "@/util/filesystem"
import { QuestionApi, QuestionLive } from "./question"
// Standalone HTTP server exposing the experimental HttpApi (question routes)
// on its own port, with optional basic auth and per-request instance scoping.
export namespace ExperimentalHttpApiServer {
// Handle returned by listen(): the bound address plus a stop() callback.
export type Listener = {
hostname: string
port: number
url: URL
stop: () => Promise<void>
}
// Small helper for plain-text responses.
function text(input: string, status: number, headers?: Record<string, string>) {
return HttpServerResponse.text(input, { status, headers })
}
// decodeURIComponent that falls back to the raw input on malformed escapes.
function decode(input: string) {
try {
return decodeURIComponent(input)
} catch {
return input
}
}
// Basic-auth middleware, enforced only when OPENCODE_SERVER_PASSWORD is set.
// The credential may arrive via the Authorization header or an `auth_token`
// query parameter (the already-base64-encoded user:pass).
const auth = <E, R>(effect: Effect.Effect<HttpServerResponse.HttpServerResponse, E, R>) =>
Effect.gen(function* () {
if (!Flag.OPENCODE_SERVER_PASSWORD) return yield* effect
const req = yield* HttpServerRequest.HttpServerRequest
const url = new URL(req.url, "http://localhost")
const token = url.searchParams.get("auth_token")
const header = token ? `Basic ${token}` : req.headers.authorization
const expected = `Basic ${Buffer.from(`${Flag.OPENCODE_SERVER_USERNAME ?? "opencode"}:${Flag.OPENCODE_SERVER_PASSWORD}`).toString("base64")}`
if (header === expected) return yield* effect
return text("Unauthorized", 401, {
"www-authenticate": 'Basic realm="opencode experimental httpapi"',
})
})
// Instance middleware: resolves the opencode instance for the request's
// directory (query param or x-opencode-directory header, defaulting to cwd)
// and provides it — plus an optional workspace ref — to the handler.
const instance = <E, R>(effect: Effect.Effect<HttpServerResponse.HttpServerResponse, E, R>) =>
Effect.gen(function* () {
const req = yield* HttpServerRequest.HttpServerRequest
const url = new URL(req.url, "http://localhost")
const raw = url.searchParams.get("directory") || req.headers["x-opencode-directory"] || process.cwd()
const workspace = url.searchParams.get("workspace") || undefined
const ctx = yield* Effect.promise(() =>
Instance.provide({
directory: Filesystem.resolve(decode(raw)),
init: () => AppRuntime.runPromise(InstanceBootstrap),
fn: () => Instance.current,
}),
)
const next = workspace ? effect.pipe(Effect.provideService(WorkspaceRef, workspace)) : effect
return yield* next.pipe(Effect.provideService(InstanceRef, ctx))
})
// Bind the Node HTTP server, wire the question routes through the auth and
// instance middleware, and return a Listener whose stop() closes the scope
// (which in turn tears down the server).
export async function listen(opts: { hostname: string; port: number }): Promise<Listener> {
const scope = await Effect.runPromise(Scope.make())
const serverLayer = NodeHttpServer.layer(createServer, { port: opts.port, host: opts.hostname })
const routes = HttpApiBuilder.layer(QuestionApi, { openapiPath: "/experimental/httpapi/question/doc" }).pipe(
Layer.provide(QuestionLive),
)
const live = Layer.mergeAll(
serverLayer,
HttpRouter.serve(routes, {
disableListenLog: true,
disableLogger: true,
middleware: (effect) => auth(instance(effect)),
}).pipe(Layer.provide(serverLayer)),
)
const ctx = await Effect.runPromise(Layer.buildWithMemoMap(live, memoMap, scope))
const server = Context.get(ctx, HttpServer.HttpServer)
// Only TCP listeners expose hostname/port; anything else is unsupported.
if (server.address._tag !== "TcpAddress") {
await Effect.runPromise(Scope.close(scope, Exit.void))
throw new Error("Experimental HttpApi server requires a TCP address")
}
const url = new URL("http://localhost")
url.hostname = server.address.hostname
url.port = String(server.address.port)
return {
hostname: server.address.hostname,
port: server.address.port,
url,
stop: () => Effect.runPromise(Scope.close(scope, Exit.void)),
}
}
}

View File

@@ -1,7 +1,6 @@
import { Hono } from "hono"
import { describeRoute, validator, resolver } from "hono-openapi"
import z from "zod"
import { AppRuntime } from "@/effect/app-runtime"
import { Permission } from "@/permission"
import { PermissionID } from "@/permission/schema"
import { errors } from "../error"
@@ -37,15 +36,11 @@ export const PermissionRoutes = lazy(() =>
async (c) => {
const params = c.req.valid("param")
const json = c.req.valid("json")
await AppRuntime.runPromise(
Permission.Service.use((svc) =>
svc.reply({
requestID: params.requestID,
reply: json.reply,
message: json.message,
}),
),
)
await Permission.reply({
requestID: params.requestID,
reply: json.reply,
message: json.message,
})
return c.json(true)
},
)
@@ -67,7 +62,7 @@ export const PermissionRoutes = lazy(() =>
},
}),
async (c) => {
const permissions = await AppRuntime.runPromise(Permission.Service.use((svc) => svc.list()))
const permissions = await Permission.list()
return c.json(permissions)
},
),

View File

@@ -21,7 +21,7 @@ export const QuestionRoutes = lazy(() =>
description: "List of pending questions",
content: {
"application/json": {
schema: resolver(Question.Request.zod.array()),
schema: resolver(Question.Request.array()),
},
},
},
@@ -56,7 +56,7 @@ export const QuestionRoutes = lazy(() =>
requestID: QuestionID.zod,
}),
),
validator("json", Question.Reply.zod),
validator("json", Question.Reply),
async (c) => {
const params = c.req.valid("param")
const json = c.req.valid("json")

View File

@@ -1070,14 +1070,10 @@ export const SessionRoutes = lazy(() =>
validator("json", z.object({ response: Permission.Reply })),
async (c) => {
const params = c.req.valid("param")
await AppRuntime.runPromise(
Permission.Service.use((svc) =>
svc.reply({
requestID: params.permissionID,
reply: c.req.valid("json").response,
}),
),
)
Permission.reply({
requestID: params.permissionID,
reply: c.req.valid("json").response,
})
return c.json(true)
},
),

View File

@@ -1,6 +1,7 @@
import { Provider } from "@/provider/provider"
import { Log } from "@/util/log"
import { Context, Effect, Layer, Record } from "effect"
import { Cause, Effect, Layer, Record, Context } from "effect"
import * as Queue from "effect/Queue"
import * as Stream from "effect/Stream"
import { streamText, wrapLanguageModel, type ModelMessage, type Tool, tool, jsonSchema } from "ai"
import { mergeDeep, pipe } from "remeda"
@@ -20,13 +21,10 @@ import { Wildcard } from "@/util/wildcard"
import { SessionID } from "@/session/schema"
import { Auth } from "@/auth"
import { Installation } from "@/installation"
import { makeRuntime } from "@/effect/run-service"
export namespace LLM {
const log = Log.create({ service: "llm" })
const perms = makeRuntime(Permission.Service, Permission.defaultLayer)
export const OUTPUT_TOKEN_MAX = ProviderTransform.OUTPUT_TOKEN_MAX
type Result = Awaited<ReturnType<typeof streamText>>
export type StreamInput = {
user: MessageV2.User
@@ -47,7 +45,7 @@ export namespace LLM {
abort: AbortSignal
}
export type Event = Result["fullStream"] extends AsyncIterable<infer T> ? T : never
export type Event = Awaited<ReturnType<typeof stream>>["fullStream"] extends AsyncIterable<infer T> ? T : never
export interface Interface {
readonly stream: (input: StreamInput) => Stream.Stream<Event, unknown>
@@ -55,340 +53,12 @@ export namespace LLM {
export class Service extends Context.Service<Service, Interface>()("@opencode/LLM") {}
export const layer: Layer.Layer<Service, never, Auth.Service | Config.Service | Provider.Service | Plugin.Service> =
Layer.effect(
Service,
Effect.gen(function* () {
const auth = yield* Auth.Service
const config = yield* Config.Service
const provider = yield* Provider.Service
const plugin = yield* Plugin.Service
const run = Effect.fn("LLM.run")(function* (input: StreamRequest) {
const l = log
.clone()
.tag("providerID", input.model.providerID)
.tag("modelID", input.model.id)
.tag("sessionID", input.sessionID)
.tag("small", (input.small ?? false).toString())
.tag("agent", input.agent.name)
.tag("mode", input.agent.mode)
l.info("stream", {
modelID: input.model.id,
providerID: input.model.providerID,
})
const [language, cfg, item, info] = yield* Effect.all(
[
provider.getLanguage(input.model),
config.get(),
provider.getProvider(input.model.providerID),
auth.get(input.model.providerID),
],
{ concurrency: "unbounded" },
)
// TODO: move this to a proper hook
const isOpenaiOauth = item.id === "openai" && info?.type === "oauth"
const system: string[] = []
system.push(
[
// use agent prompt otherwise provider prompt
...(input.agent.prompt ? [input.agent.prompt] : SystemPrompt.provider(input.model)),
// any custom prompt passed into this call
...input.system,
// any custom prompt from last user message
...(input.user.system ? [input.user.system] : []),
]
.filter((x) => x)
.join("\n"),
)
const header = system[0]
yield* plugin.trigger(
"experimental.chat.system.transform",
{ sessionID: input.sessionID, model: input.model },
{ system },
)
// rejoin to maintain 2-part structure for caching if header unchanged
if (system.length > 2 && system[0] === header) {
const rest = system.slice(1)
system.length = 0
system.push(header, rest.join("\n"))
}
const variant =
!input.small && input.model.variants && input.user.model.variant
? input.model.variants[input.user.model.variant]
: {}
const base = input.small
? ProviderTransform.smallOptions(input.model)
: ProviderTransform.options({
model: input.model,
sessionID: input.sessionID,
providerOptions: item.options,
})
const options: Record<string, any> = pipe(
base,
mergeDeep(input.model.options),
mergeDeep(input.agent.options),
mergeDeep(variant),
)
if (isOpenaiOauth) {
options.instructions = system.join("\n")
}
const isWorkflow = language instanceof GitLabWorkflowLanguageModel
const messages = isOpenaiOauth
? input.messages
: isWorkflow
? input.messages
: [
...system.map(
(x): ModelMessage => ({
role: "system",
content: x,
}),
),
...input.messages,
]
const params = yield* plugin.trigger(
"chat.params",
{
sessionID: input.sessionID,
agent: input.agent.name,
model: input.model,
provider: item,
message: input.user,
},
{
temperature: input.model.capabilities.temperature
? (input.agent.temperature ?? ProviderTransform.temperature(input.model))
: undefined,
topP: input.agent.topP ?? ProviderTransform.topP(input.model),
topK: ProviderTransform.topK(input.model),
maxOutputTokens: ProviderTransform.maxOutputTokens(input.model),
options,
},
)
const { headers } = yield* plugin.trigger(
"chat.headers",
{
sessionID: input.sessionID,
agent: input.agent.name,
model: input.model,
provider: item,
message: input.user,
},
{
headers: {},
},
)
const tools = resolveTools(input)
// LiteLLM and some Anthropic proxies require the tools parameter to be present
// when message history contains tool calls, even if no tools are being used.
// Add a dummy tool that is never called to satisfy this validation.
// This is enabled for:
// 1. Providers with "litellm" in their ID or API ID (auto-detected)
// 2. Providers with explicit "litellmProxy: true" option (opt-in for custom gateways)
const isLiteLLMProxy =
item.options?.["litellmProxy"] === true ||
input.model.providerID.toLowerCase().includes("litellm") ||
input.model.api.id.toLowerCase().includes("litellm")
// LiteLLM/Bedrock rejects requests where the message history contains tool
// calls but no tools param is present. When there are no active tools (e.g.
// during compaction), inject a stub tool to satisfy the validation requirement.
// The stub description explicitly tells the model not to call it.
if (isLiteLLMProxy && Object.keys(tools).length === 0 && hasToolCalls(input.messages)) {
tools["_noop"] = tool({
description: "Do not call this tool. It exists only for API compatibility and must never be invoked.",
inputSchema: jsonSchema({
type: "object",
properties: {
reason: { type: "string", description: "Unused" },
},
}),
execute: async () => ({ output: "", title: "", metadata: {} }),
})
}
// Wire up toolExecutor for DWS workflow models so that tool calls
// from the workflow service are executed via opencode's tool system
// and results sent back over the WebSocket.
if (language instanceof GitLabWorkflowLanguageModel) {
const workflowModel = language as GitLabWorkflowLanguageModel & {
sessionID?: string
sessionPreapprovedTools?: string[]
approvalHandler?: (approvalTools: { name: string; args: string }[]) => Promise<{ approved: boolean }>
}
workflowModel.sessionID = input.sessionID
workflowModel.systemPrompt = system.join("\n")
workflowModel.toolExecutor = async (toolName, argsJson, _requestID) => {
const t = tools[toolName]
if (!t || !t.execute) {
return { result: "", error: `Unknown tool: ${toolName}` }
}
try {
const result = await t.execute!(JSON.parse(argsJson), {
toolCallId: _requestID,
messages: input.messages,
abortSignal: input.abort,
})
const output = typeof result === "string" ? result : (result?.output ?? JSON.stringify(result))
return {
result: output,
metadata: typeof result === "object" ? result?.metadata : undefined,
title: typeof result === "object" ? result?.title : undefined,
}
} catch (e: any) {
return { result: "", error: e.message ?? String(e) }
}
}
const ruleset = Permission.merge(input.agent.permission ?? [], input.permission ?? [])
workflowModel.sessionPreapprovedTools = Object.keys(tools).filter((name) => {
const match = ruleset.findLast((rule) => Wildcard.match(name, rule.permission))
return !match || match.action !== "ask"
})
const approvedToolsForSession = new Set<string>()
workflowModel.approvalHandler = Instance.bind(async (approvalTools) => {
const uniqueNames = [...new Set(approvalTools.map((t: { name: string }) => t.name))] as string[]
// Auto-approve tools that were already approved in this session
// (prevents infinite approval loops for server-side MCP tools)
if (uniqueNames.every((name) => approvedToolsForSession.has(name))) {
return { approved: true }
}
const id = PermissionID.ascending()
let reply: Permission.Reply | undefined
let unsub: (() => void) | undefined
try {
unsub = Bus.subscribe(Permission.Event.Replied, (evt) => {
if (evt.properties.requestID === id) reply = evt.properties.reply
})
const toolPatterns = approvalTools.map((t: { name: string; args: string }) => {
try {
const parsed = JSON.parse(t.args) as Record<string, unknown>
const title = (parsed?.title ?? parsed?.name ?? "") as string
return title ? `${t.name}: ${title}` : t.name
} catch {
return t.name
}
})
const uniquePatterns = [...new Set(toolPatterns)] as string[]
await perms.runPromise((svc) =>
svc.ask({
id,
sessionID: SessionID.make(input.sessionID),
permission: "workflow_tool_approval",
patterns: uniquePatterns,
metadata: { tools: approvalTools },
always: uniquePatterns,
ruleset: [],
}),
)
for (const name of uniqueNames) approvedToolsForSession.add(name)
workflowModel.sessionPreapprovedTools = [
...(workflowModel.sessionPreapprovedTools ?? []),
...uniqueNames,
]
return { approved: true }
} catch {
return { approved: false }
} finally {
unsub?.()
}
})
}
return streamText({
onError(error) {
l.error("stream error", {
error,
})
},
async experimental_repairToolCall(failed) {
const lower = failed.toolCall.toolName.toLowerCase()
if (lower !== failed.toolCall.toolName && tools[lower]) {
l.info("repairing tool call", {
tool: failed.toolCall.toolName,
repaired: lower,
})
return {
...failed.toolCall,
toolName: lower,
}
}
return {
...failed.toolCall,
input: JSON.stringify({
tool: failed.toolCall.toolName,
error: failed.error.message,
}),
toolName: "invalid",
}
},
temperature: params.temperature,
topP: params.topP,
topK: params.topK,
providerOptions: ProviderTransform.providerOptions(input.model, params.options),
activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
tools,
toolChoice: input.toolChoice,
maxOutputTokens: params.maxOutputTokens,
abortSignal: input.abort,
headers: {
...(input.model.providerID.startsWith("opencode")
? {
"x-opencode-project": Instance.project.id,
"x-opencode-session": input.sessionID,
"x-opencode-request": input.user.id,
"x-opencode-client": Flag.OPENCODE_CLIENT,
}
: {
"x-session-affinity": input.sessionID,
...(input.parentSessionID ? { "x-parent-session-id": input.parentSessionID } : {}),
"User-Agent": `opencode/${Installation.VERSION}`,
}),
...input.model.headers,
...headers,
},
maxRetries: input.retries ?? 0,
messages,
model: wrapLanguageModel({
model: language,
middleware: [
{
specificationVersion: "v3" as const,
async transformParams(args) {
if (args.type === "stream") {
// @ts-expect-error
args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options)
}
return args.params
},
},
],
}),
experimental_telemetry: {
isEnabled: cfg.experimental?.openTelemetry,
metadata: {
userId: cfg.username ?? "unknown",
sessionId: input.sessionID,
},
},
})
})
const stream: Interface["stream"] = (input) =>
Stream.scoped(
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
return Service.of({
stream(input) {
return Stream.scoped(
Stream.unwrap(
Effect.gen(function* () {
const ctrl = yield* Effect.acquireRelease(
@@ -396,7 +66,7 @@ export namespace LLM {
(ctrl) => Effect.sync(() => ctrl.abort()),
)
const result = yield* run({ ...input, abort: ctrl.signal })
const result = yield* Effect.promise(() => LLM.stream({ ...input, abort: ctrl.signal }))
return Stream.fromAsyncIterable(result.fullStream, (e) =>
e instanceof Error ? e : new Error(String(e)),
@@ -404,19 +74,335 @@ export namespace LLM {
}),
),
)
},
})
}),
)
return Service.of({ stream })
}),
export const defaultLayer = layer
export async function stream(input: StreamRequest) {
const l = log
.clone()
.tag("providerID", input.model.providerID)
.tag("modelID", input.model.id)
.tag("sessionID", input.sessionID)
.tag("small", (input.small ?? false).toString())
.tag("agent", input.agent.name)
.tag("mode", input.agent.mode)
l.info("stream", {
modelID: input.model.id,
providerID: input.model.providerID,
})
const [language, cfg, provider, info] = await Effect.runPromise(
Effect.gen(function* () {
const auth = yield* Auth.Service
const cfg = yield* Config.Service
const provider = yield* Provider.Service
return yield* Effect.all(
[
provider.getLanguage(input.model),
cfg.get(),
provider.getProvider(input.model.providerID),
auth.get(input.model.providerID),
],
{ concurrency: "unbounded" },
)
}).pipe(Effect.provide(Layer.mergeAll(Auth.defaultLayer, Config.defaultLayer, Provider.defaultLayer))),
)
// TODO: move this to a proper hook
const isOpenaiOauth = provider.id === "openai" && info?.type === "oauth"
const system: string[] = []
system.push(
[
// use agent prompt otherwise provider prompt
...(input.agent.prompt ? [input.agent.prompt] : SystemPrompt.provider(input.model)),
// any custom prompt passed into this call
...input.system,
// any custom prompt from last user message
...(input.user.system ? [input.user.system] : []),
]
.filter((x) => x)
.join("\n"),
)
export const defaultLayer = Layer.suspend(() =>
layer.pipe(
Layer.provide(Auth.defaultLayer),
Layer.provide(Config.defaultLayer),
Layer.provide(Provider.defaultLayer),
Layer.provide(Plugin.defaultLayer),
),
)
const header = system[0]
await Plugin.trigger(
"experimental.chat.system.transform",
{ sessionID: input.sessionID, model: input.model },
{ system },
)
// rejoin to maintain 2-part structure for caching if header unchanged
if (system.length > 2 && system[0] === header) {
const rest = system.slice(1)
system.length = 0
system.push(header, rest.join("\n"))
}
const variant =
!input.small && input.model.variants && input.user.model.variant
? input.model.variants[input.user.model.variant]
: {}
const base = input.small
? ProviderTransform.smallOptions(input.model)
: ProviderTransform.options({
model: input.model,
sessionID: input.sessionID,
providerOptions: provider.options,
})
const options: Record<string, any> = pipe(
base,
mergeDeep(input.model.options),
mergeDeep(input.agent.options),
mergeDeep(variant),
)
if (isOpenaiOauth) {
options.instructions = system.join("\n")
}
const isWorkflow = language instanceof GitLabWorkflowLanguageModel
const messages = isOpenaiOauth
? input.messages
: isWorkflow
? input.messages
: [
...system.map(
(x): ModelMessage => ({
role: "system",
content: x,
}),
),
...input.messages,
]
const params = await Plugin.trigger(
"chat.params",
{
sessionID: input.sessionID,
agent: input.agent.name,
model: input.model,
provider,
message: input.user,
},
{
temperature: input.model.capabilities.temperature
? (input.agent.temperature ?? ProviderTransform.temperature(input.model))
: undefined,
topP: input.agent.topP ?? ProviderTransform.topP(input.model),
topK: ProviderTransform.topK(input.model),
maxOutputTokens: ProviderTransform.maxOutputTokens(input.model),
options,
},
)
const { headers } = await Plugin.trigger(
"chat.headers",
{
sessionID: input.sessionID,
agent: input.agent.name,
model: input.model,
provider,
message: input.user,
},
{
headers: {},
},
)
const tools = resolveTools(input)
// LiteLLM and some Anthropic proxies require the tools parameter to be present
// when message history contains tool calls, even if no tools are being used.
// Add a dummy tool that is never called to satisfy this validation.
// This is enabled for:
// 1. Providers with "litellm" in their ID or API ID (auto-detected)
// 2. Providers with explicit "litellmProxy: true" option (opt-in for custom gateways)
const isLiteLLMProxy =
provider.options?.["litellmProxy"] === true ||
input.model.providerID.toLowerCase().includes("litellm") ||
input.model.api.id.toLowerCase().includes("litellm")
// LiteLLM/Bedrock rejects requests where the message history contains tool
// calls but no tools param is present. When there are no active tools (e.g.
// during compaction), inject a stub tool to satisfy the validation requirement.
// The stub description explicitly tells the model not to call it.
if (isLiteLLMProxy && Object.keys(tools).length === 0 && hasToolCalls(input.messages)) {
tools["_noop"] = tool({
description: "Do not call this tool. It exists only for API compatibility and must never be invoked.",
inputSchema: jsonSchema({
type: "object",
properties: {
reason: { type: "string", description: "Unused" },
},
}),
execute: async () => ({ output: "", title: "", metadata: {} }),
})
}
// Wire up toolExecutor for DWS workflow models so that tool calls
// from the workflow service are executed via opencode's tool system
// and results sent back over the WebSocket.
if (language instanceof GitLabWorkflowLanguageModel) {
const workflowModel = language as GitLabWorkflowLanguageModel & {
sessionID?: string
sessionPreapprovedTools?: string[]
approvalHandler?: (approvalTools: { name: string; args: string }[]) => Promise<{ approved: boolean }>
}
workflowModel.sessionID = input.sessionID
workflowModel.systemPrompt = system.join("\n")
workflowModel.toolExecutor = async (toolName, argsJson, _requestID) => {
const t = tools[toolName]
if (!t || !t.execute) {
return { result: "", error: `Unknown tool: ${toolName}` }
}
try {
const result = await t.execute!(JSON.parse(argsJson), {
toolCallId: _requestID,
messages: input.messages,
abortSignal: input.abort,
})
const output = typeof result === "string" ? result : (result?.output ?? JSON.stringify(result))
return {
result: output,
metadata: typeof result === "object" ? result?.metadata : undefined,
title: typeof result === "object" ? result?.title : undefined,
}
} catch (e: any) {
return { result: "", error: e.message ?? String(e) }
}
}
const ruleset = Permission.merge(input.agent.permission ?? [], input.permission ?? [])
workflowModel.sessionPreapprovedTools = Object.keys(tools).filter((name) => {
const match = ruleset.findLast((rule) => Wildcard.match(name, rule.permission))
return !match || match.action !== "ask"
})
const approvedToolsForSession = new Set<string>()
workflowModel.approvalHandler = Instance.bind(async (approvalTools) => {
const uniqueNames = [...new Set(approvalTools.map((t: { name: string }) => t.name))] as string[]
// Auto-approve tools that were already approved in this session
// (prevents infinite approval loops for server-side MCP tools)
if (uniqueNames.every((name) => approvedToolsForSession.has(name))) {
return { approved: true }
}
const id = PermissionID.ascending()
let reply: Permission.Reply | undefined
let unsub: (() => void) | undefined
try {
unsub = Bus.subscribe(Permission.Event.Replied, (evt) => {
if (evt.properties.requestID === id) reply = evt.properties.reply
})
const toolPatterns = approvalTools.map((t: { name: string; args: string }) => {
try {
const parsed = JSON.parse(t.args) as Record<string, unknown>
const title = (parsed?.title ?? parsed?.name ?? "") as string
return title ? `${t.name}: ${title}` : t.name
} catch {
return t.name
}
})
const uniquePatterns = [...new Set(toolPatterns)] as string[]
await Permission.ask({
id,
sessionID: SessionID.make(input.sessionID),
permission: "workflow_tool_approval",
patterns: uniquePatterns,
metadata: { tools: approvalTools },
always: uniquePatterns,
ruleset: [],
})
for (const name of uniqueNames) approvedToolsForSession.add(name)
workflowModel.sessionPreapprovedTools = [...(workflowModel.sessionPreapprovedTools ?? []), ...uniqueNames]
return { approved: true }
} catch {
return { approved: false }
} finally {
unsub?.()
}
})
}
return streamText({
onError(error) {
l.error("stream error", {
error,
})
},
async experimental_repairToolCall(failed) {
const lower = failed.toolCall.toolName.toLowerCase()
if (lower !== failed.toolCall.toolName && tools[lower]) {
l.info("repairing tool call", {
tool: failed.toolCall.toolName,
repaired: lower,
})
return {
...failed.toolCall,
toolName: lower,
}
}
return {
...failed.toolCall,
input: JSON.stringify({
tool: failed.toolCall.toolName,
error: failed.error.message,
}),
toolName: "invalid",
}
},
temperature: params.temperature,
topP: params.topP,
topK: params.topK,
providerOptions: ProviderTransform.providerOptions(input.model, params.options),
activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
tools,
toolChoice: input.toolChoice,
maxOutputTokens: params.maxOutputTokens,
abortSignal: input.abort,
headers: {
...(input.model.providerID.startsWith("opencode")
? {
"x-opencode-project": Instance.project.id,
"x-opencode-session": input.sessionID,
"x-opencode-request": input.user.id,
"x-opencode-client": Flag.OPENCODE_CLIENT,
}
: {
"x-session-affinity": input.sessionID,
...(input.parentSessionID ? { "x-parent-session-id": input.parentSessionID } : {}),
"User-Agent": `opencode/${Installation.VERSION}`,
}),
...input.model.headers,
...headers,
},
maxRetries: input.retries ?? 0,
messages,
model: wrapLanguageModel({
model: language,
middleware: [
{
specificationVersion: "v3" as const,
async transformParams(args) {
if (args.type === "stream") {
// @ts-expect-error
args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options)
}
return args.params
},
},
],
}),
experimental_telemetry: {
isEnabled: cfg.experimental?.openTelemetry,
metadata: {
userId: cfg.username ?? "unknown",
sessionId: input.sessionID,
},
},
})
}
function resolveTools(input: Pick<StreamInput, "tools" | "agent" | "permission" | "user">) {
const disabled = Permission.disabled(

View File

@@ -0,0 +1,301 @@
import path from "path"
import * as NodeFS from "fs/promises"
import { InstanceState } from "@/effect/instance-state"
import { AppFileSystem } from "@/filesystem"
import { Shell } from "@/shell/shell"
import { Effect, Layer, Scope, Deferred, Stream, Context, Exit, Schema, Struct } from "effect"
import { ChildProcess } from "effect/unstable/process"
import { ChildProcessSpawner } from "effect/unstable/process/ChildProcessSpawner"
import type { ChildProcessHandle } from "effect/unstable/process/ChildProcessSpawner"
import { JobID } from "./schema"
// Shell names that require PowerShell-style argument passing on Windows.
const PS = new Set(["powershell", "pwsh"])
// ShellJob: runs shell commands as background "jobs" scoped to an opencode
// instance. Each job's combined output is appended to a log file on disk and
// its metadata is persisted as JSON, so state survives inspection from other
// processes. The service exposes start/output/wait/kill plus listing.
export namespace ShellJob {
// Lifecycle states. "timed_out" and "killed" are distinguished via the
// Active.next field set before the process is killed (see watch/kill).
export const Status = Schema.Literals(["running", "completed", "failed", "killed", "timed_out"])
export type Status = Schema.Schema.Type<typeof Status>
// Public snapshot of a job. started_at/ended_at are epoch millis
// (Date.now()); cursor counts characters of decoded output written so far.
export class Info extends Schema.Class<Info>("ShellJob.Info")({
id: JobID,
command: Schema.String,
cwd: Schema.String,
shell: Schema.String,
title: Schema.optional(Schema.String),
status: Status,
pid: Schema.optional(Schema.Number),
started_at: Schema.Number,
ended_at: Schema.optional(Schema.Number),
// null when the process did not exit normally (killed / timed out).
exit_code: Schema.optional(Schema.NullOr(Schema.Number)),
// Absolute paths of the on-disk output log and metadata JSON.
output_path: Schema.String,
meta_path: Schema.String,
cursor: Schema.Number,
}) {}
// Incremental output read: text from the requested cursor onward, the new
// cursor to resume from, and whether the job has finished producing output.
export class Output extends Schema.Class<Output>("ShellJob.Output")({
text: Schema.String,
cursor: Schema.Number,
done: Schema.Boolean,
}) {}
// Parameters for start(). timeout is in milliseconds (see watch); env
// entries are merged over process.env.
export class StartInput extends Schema.Class<StartInput>("ShellJob.StartInput")({
command: Schema.String,
cwd: Schema.optional(Schema.String),
shell: Schema.optional(Schema.String),
title: Schema.optional(Schema.String),
timeout: Schema.optional(Schema.Number),
env: Schema.optional(Schema.Record(Schema.String, Schema.String)),
}) {}
// Parameters for wait(): optional timeout in milliseconds after which the
// current (still-running) snapshot is returned instead.
export class WaitInput extends Schema.Class<WaitInput>("ShellJob.WaitInput")({
id: JobID,
timeout: Schema.optional(Schema.Number),
}) {}
// Parameters for output(): cursor defaults to 0 (read from the beginning).
export class OutputInput extends Schema.Class<OutputInput>("ShellJob.OutputInput")({
id: JobID,
cursor: Schema.optional(Schema.Number),
}) {}
// In-memory bookkeeping for a live job.
// - next: status to record when the process dies from a deliberate kill
//   ("killed") or timeout ("timed_out"); undefined means a natural exit.
// - done: resolved exactly once with the final Info snapshot.
// - handle: cleared once the job ends.
type Active = {
info: Struct.Mutable<Info>
next: Status | undefined
done: Deferred.Deferred<Info>
handle: ChildProcessHandle | undefined
}
// Per-instance state: dir is the working directory default for jobs, root is
// the .opencode/jobs storage directory, scope owns forked fibers/processes.
type State = {
dir: string
root: string
jobs: Map<JobID, Active>
scope: Scope.Scope
}
// Service surface. get/output/wait/kill return undefined for unknown ids.
export interface Interface {
readonly list: () => Effect.Effect<Info[]>
readonly get: (id: JobID) => Effect.Effect<Info | undefined>
readonly start: (input: StartInput) => Effect.Effect<Info>
readonly output: (input: OutputInput) => Effect.Effect<Output | undefined>
readonly wait: (input: WaitInput) => Effect.Effect<Info | undefined>
readonly kill: (id: JobID) => Effect.Effect<Info | undefined>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/ShellJob") {}
// Build the child-process description for a command.
// Windows PowerShell/pwsh cannot be driven through the generic `shell`
// option here, so it is invoked explicitly with -Command; everywhere else
// the command string is handed to the shell. Jobs are detached on POSIX
// (own process group) but never on Windows.
function spawn(shell: string, name: string, command: string, cwd: string, env: NodeJS.ProcessEnv) {
if (process.platform === "win32" && PS.has(name)) {
return ChildProcess.make(shell, ["-NoLogo", "-NoProfile", "-NonInteractive", "-Command", command], {
cwd,
env,
stdin: "ignore",
detached: false,
})
}
return ChildProcess.make(command, [], {
shell,
cwd,
env,
stdin: "ignore",
detached: process.platform !== "win32",
})
}
// Immutable snapshot of a job's current Info.
// NOTE(review): id is re-wrapped via String(), presumably to strip the
// JobID newtype brand before schema construction — confirm against Newtype.
const snap = (job: Active) =>
new Info({
...job.info,
id: String(job.info.id),
})
// Service layer. Requires the app filesystem and a process spawner.
// All filesystem failures are treated as defects (orDie) since job storage
// lives under the project's own .opencode directory.
export const layer: Layer.Layer<Service, never, AppFileSystem.Service | ChildProcessSpawner> = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const spawner = yield* ChildProcessSpawner
// Append a decoded output chunk to the job's log file.
const append = Effect.fn("ShellJob.append")(function* (job: Active, chunk: string) {
yield* Effect.tryPromise({
try: () => NodeFS.appendFile(job.info.output_path, chunk, "utf8"),
catch: () => new Error("Failed to append shell job output"),
}).pipe(Effect.orDie)
})
// Persist the job's metadata JSON to disk.
const write = Effect.fn("ShellJob.write")(function* (job: Active) {
yield* fs.writeJson(job.info.meta_path, job.info).pipe(Effect.orDie)
})
// Finalize a job exactly once: record status/end time/exit code, drop the
// process handle, persist metadata, and resolve the `done` deferred with
// the final snapshot. Idempotent — returns the current snapshot if the
// job already ended.
const end = Effect.fn("ShellJob.end")(function* (job: Active, status: Status, code?: number | null) {
if (job.info.status !== "running") return snap(job)
job.info.status = status
job.info.ended_at = Date.now()
job.info.exit_code = code
job.handle = undefined
job.next = undefined
yield* write(job)
const info = snap(job)
// ignore: the deferred may already be resolved by a racing finalizer
yield* Deferred.succeed(job.done, info).pipe(Effect.ignore)
return info
})
// Supervise a running job: optionally arm a timeout killer, stream the
// process's combined output into the log file, then wait for exit and
// finalize. A successful exit code maps 0 -> "completed", else "failed";
// a non-normal exit uses job.next ("timed_out"/"killed") or "killed".
const watch = Effect.fn("ShellJob.watch")(function* (job: Active, timeout?: number) {
const handle = job.handle
if (!handle) return snap(job)
if (timeout) {
// Forked timer: marks the intended status before killing so `end`
// can distinguish a timeout from an explicit kill.
yield* Effect.sleep(`${timeout} millis`).pipe(
Effect.andThen(
Effect.gen(function* () {
if (job.info.status !== "running") return
job.next = "timed_out"
yield* handle.kill({ forceKillAfter: "3 seconds" }).pipe(Effect.ignore)
}),
),
Effect.forkScoped,
)
}
// Forked pump: decode the process's interleaved stdout+stderr and
// append to the log, advancing the character cursor as we go.
yield* Effect.forkScoped(
Stream.runForEach(Stream.decodeText(handle.all), (chunk) =>
Effect.gen(function* () {
job.info.cursor += chunk.length
yield* append(job, chunk)
}),
),
)
const exit = yield* Effect.exit(handle.exitCode)
if (Exit.isSuccess(exit)) {
const code = Number(exit.value)
return yield* end(job, code === 0 ? "completed" : "failed", code)
}
return yield* end(job, job.next ?? "killed", null)
})
// Lazily-initialized per-instance state: picks the job storage root
// (worktree for VCS projects, plain directory otherwise), ensures it
// exists, and clears the in-memory job map when the instance is torn down.
const state = yield* InstanceState.make<State>(
Effect.fn("ShellJob.state")(function* (ctx) {
const dir = ctx.project.vcs ? ctx.worktree : ctx.directory
const root = path.join(dir, ".opencode", "jobs")
const state: State = {
dir: ctx.directory,
root,
jobs: new Map(),
scope: yield* Scope.Scope,
}
yield* fs.ensureDir(root).pipe(Effect.orDie)
yield* Effect.addFinalizer(() =>
Effect.gen(function* () {
state.jobs.clear()
}),
)
return state
}),
)
// Snapshot all known jobs, oldest first.
const list: Interface["list"] = Effect.fn("ShellJob.list")(function* () {
const s = yield* InstanceState.get(state)
return Array.from(s.jobs.values())
.map(snap)
.toSorted((a, b) => a.started_at - b.started_at)
})
// Snapshot a single job, or undefined if unknown.
const get: Interface["get"] = Effect.fn("ShellJob.get")(function* (id: JobID) {
const s = yield* InstanceState.get(state)
const job = s.jobs.get(id)
if (!job) return
return snap(job)
})
// Start a new job: spawn the process inside the instance scope, register
// bookkeeping, create the (empty) log file and metadata on disk, then
// launch the unsupervised `watch` fiber and return the initial snapshot.
const start: Interface["start"] = Effect.fn("ShellJob.start")(function* (input: StartInput) {
const s = yield* InstanceState.get(state)
const id = JobID.ascending()
const dir = path.join(s.root, String(id))
const cwd = input.cwd ?? s.dir
const shell = input.shell ?? Shell.acceptable()
const name = Shell.name(shell)
// Spawned in the instance scope so the process outlives this effect.
const handle = yield* Scope.provide(s.scope)(
spawner.spawn(
spawn(shell, name, input.command, cwd, {
...process.env,
...input.env,
}),
),
).pipe(Effect.orDie)
const job: Active = {
info: {
id,
command: input.command,
cwd,
shell,
title: input.title,
status: "running",
pid: Number(handle.pid),
started_at: Date.now(),
output_path: path.join(dir, "output.log"),
meta_path: path.join(dir, "meta.json"),
cursor: 0,
} satisfies Struct.Mutable<Info>,
next: undefined,
done: yield* Deferred.make<Info>(),
handle,
}
s.jobs.set(id, job)
yield* fs.writeWithDirs(job.info.output_path, "").pipe(Effect.orDie)
yield* write(job)
// Fire-and-forget supervisor; runFork detaches it from this effect's
// lifetime while Scope.provide ties it to the instance scope.
yield* Effect.sync(() => {
Effect.runFork(Scope.provide(s.scope)(watch(job, input.timeout)))
})
return snap(job)
})
// Read output from a cursor position. A missing/unreadable log yields "".
// The returned cursor is the full current length, suitable for polling.
const output: Interface["output"] = Effect.fn("ShellJob.output")(function* (input: OutputInput) {
const s = yield* InstanceState.get(state)
const job = s.jobs.get(input.id)
if (!job) return
const cursor = input.cursor ?? 0
const text = yield* fs.readFileString(job.info.output_path).pipe(Effect.catch(() => Effect.succeed("")))
return new Output({
text: cursor >= text.length ? "" : text.slice(cursor),
cursor: text.length,
done: job.info.status !== "running",
})
})
// Wait for a job to finish. Without a timeout this blocks on the `done`
// deferred; with one, it races completion against a sleep that yields the
// still-running snapshot (taken at call time) on expiry.
const wait: Interface["wait"] = Effect.fn("ShellJob.wait")(function* (input: WaitInput) {
const s = yield* InstanceState.get(state)
const job = s.jobs.get(input.id)
if (!job) return
if (job.info.status !== "running") return snap(job)
if (!input.timeout) return yield* Deferred.await(job.done)
return yield* Effect.raceAll([
Deferred.await(job.done),
Effect.sleep(`${input.timeout} millis`).pipe(Effect.as(snap(job))),
])
})
// Kill a running job and wait for its final snapshot. Preserves a
// previously set `next` status (e.g. "timed_out") rather than
// overwriting it with "killed".
const kill: Interface["kill"] = Effect.fn("ShellJob.kill")(function* (id: JobID) {
const s = yield* InstanceState.get(state)
const job = s.jobs.get(id)
if (!job) return
if (job.info.status !== "running") return snap(job)
if (!job.handle) return snap(job)
if (!job.next) job.next = "killed"
yield* job.handle.kill({ forceKillAfter: "3 seconds" }).pipe(Effect.ignore)
return yield* Deferred.await(job.done)
})
return Service.of({
list,
get,
start,
output,
wait,
kill,
})
}),
)
}

View File

@@ -0,0 +1,10 @@
import { Schema } from "effect"
import { Identifier } from "@/id/id"
import { Newtype } from "@/util/schema"
// JobID: branded string identifier for shell jobs, backed by a plain
// string schema through the Newtype helper.
export class JobID extends Newtype<JobID>()("JobID", Schema.String) {
/**
 * Create a monotonically ascending job id with the "job" prefix,
 * optionally seeded from an existing raw id string.
 */
static ascending(id?: string): JobID {
const raw = Identifier.ascending("job", id)
return this.make(raw)
}
}

View File

@@ -40,10 +40,6 @@ export const GlobTool = Tool.define(
let search = params.path ?? Instance.directory
search = path.isAbsolute(search) ? search : path.resolve(Instance.directory, search)
const info = yield* fs.stat(search).pipe(Effect.catch(() => Effect.succeed(undefined)))
if (info?.type === "File") {
throw new Error(`glob path must be a directory: ${search}`)
}
yield* assertExternalDirectoryEffect(ctx, search, { kind: "directory" })
const limit = 100

View File

@@ -51,25 +51,19 @@ export const GrepTool = Tool.define(
? (params.path ?? Instance.directory)
: path.join(Instance.directory, params.path ?? "."),
)
const info = yield* fs.stat(searchPath).pipe(Effect.catch(() => Effect.succeed(undefined)))
const cwd = info?.type === "Directory" ? searchPath : path.dirname(searchPath)
const file = info?.type === "Directory" ? undefined : [searchPath]
yield* assertExternalDirectoryEffect(ctx, searchPath, {
kind: info?.type === "Directory" ? "directory" : "file",
})
yield* assertExternalDirectoryEffect(ctx, searchPath, { kind: "directory" })
const result = yield* rg.search({
cwd,
cwd: searchPath,
pattern: params.pattern,
glob: params.include ? [params.include] : undefined,
file,
})
if (result.items.length === 0) return empty
const rows = result.items.map((item) => ({
path: AppFileSystem.resolve(
path.isAbsolute(item.path.text) ? item.path.text : path.join(cwd, item.path.text),
path.isAbsolute(item.path.text) ? item.path.text : path.join(searchPath, item.path.text),
),
line: item.line_number,
text: item.lines.text,

View File

@@ -5,11 +5,11 @@ import { Question } from "../question"
import DESCRIPTION from "./question.txt"
const parameters = z.object({
questions: z.array(Question.Prompt.zod).describe("Questions to ask"),
questions: z.array(Question.Info.omit({ custom: true })).describe("Questions to ask"),
})
type Metadata = {
answers: ReadonlyArray<Question.Answer>
answers: Question.Answer[]
}
export const QuestionTool = Tool.define<typeof parameters, Metadata, Question.Service>(

View File

@@ -76,25 +76,6 @@ describe("Ripgrep.Service", () => {
expect(result.items[0]?.lines.text).toContain("needle")
})
test("search supports explicit file targets", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(path.join(dir, "match.ts"), "const value = 'needle'\n")
await Bun.write(path.join(dir, "skip.ts"), "const value = 'needle'\n")
},
})
const file = path.join(tmp.path, "match.ts")
const result = await Effect.gen(function* () {
const rg = yield* Ripgrep.Service
return yield* rg.search({ cwd: tmp.path, pattern: "needle", file: [file] })
}).pipe(Effect.provide(Ripgrep.defaultLayer), Effect.runPromise)
expect(result.partial).toBe(false)
expect(result.items).toHaveLength(1)
expect(result.items[0]?.path.text).toBe(file)
})
test("files returns stream of filenames", async () => {
await using tmp = await tmpdir({
init: async (dir) => {

File diff suppressed because it is too large Load Diff

View File

@@ -6,12 +6,12 @@ import { tmpdir } from "../fixture/fixture"
import { SessionID } from "../../src/session/schema"
import { AppRuntime } from "../../src/effect/app-runtime"
const ask = (input: { sessionID: SessionID; questions: ReadonlyArray<Question.Info>; tool?: Question.Tool }) =>
const ask = (input: { sessionID: SessionID; questions: Question.Info[]; tool?: { messageID: any; callID: string } }) =>
AppRuntime.runPromise(Question.Service.use((svc) => svc.ask(input)))
const list = () => AppRuntime.runPromise(Question.Service.use((svc) => svc.list()))
const reply = (input: { requestID: QuestionID; answers: ReadonlyArray<Question.Answer> }) =>
const reply = (input: { requestID: QuestionID; answers: Question.Answer[] }) =>
AppRuntime.runPromise(Question.Service.use((svc) => svc.reply(input)))
const reject = (id: QuestionID) => AppRuntime.runPromise(Question.Service.use((svc) => svc.reject(id)))

View File

@@ -1,73 +0,0 @@
import { afterEach, describe, expect, test } from "bun:test"
import { AppRuntime } from "../../src/effect/app-runtime"
import { Instance } from "../../src/project/instance"
import { Question } from "../../src/question"
import { Server } from "../../src/server/server"
import { ExperimentalHttpApiServer } from "../../src/server/instance/httpapi/server"
import { SessionID } from "../../src/session/schema"
import { Log } from "../../src/util/log"
import { tmpdir } from "../fixture/fixture"
Log.init({ print: false })
const ask = (input: { sessionID: SessionID; questions: ReadonlyArray<Question.Info> }) =>
AppRuntime.runPromise(Question.Service.use((svc) => svc.ask(input)))
afterEach(async () => {
await Instance.disposeAll()
})
describe("experimental question effect httpapi server", () => {
test("serves the question slice directly over effect http", async () => {
await using tmp = await tmpdir({ git: true })
const server = await ExperimentalHttpApiServer.listen({ hostname: "127.0.0.1", port: 0 })
const headers = {
"content-type": "application/json",
"x-opencode-directory": tmp.path,
}
const questions: ReadonlyArray<Question.Info> = [
{
question: "What would you like to do?",
header: "Action",
options: [
{ label: "Option 1", description: "First option" },
{ label: "Option 2", description: "Second option" },
],
},
]
let pending!: ReturnType<typeof ask>
await Instance.provide({
directory: tmp.path,
fn: async () => {
pending = ask({
sessionID: SessionID.make("ses_test"),
questions,
})
},
})
try {
const list = await fetch(`${server.url}/experimental/httpapi/question`, { headers })
expect(list.status).toBe(200)
const items = await list.json()
expect(items).toHaveLength(1)
const doc = await fetch(`${server.url}/experimental/httpapi/question/doc`, { headers })
expect(doc.status).toBe(200)
const spec = await doc.json()
expect(spec.paths["/experimental/httpapi/question"]?.get?.operationId).toBe("question.list")
const reply = await fetch(`${server.url}/experimental/httpapi/question/${items[0].id}/reply`, {
method: "POST",
headers,
body: JSON.stringify({ answers: [["Option 1"]] }),
})
expect(reply.status).toBe(200)
expect(await pending).toEqual([["Option 1"]])
} finally {
await server.stop()
}
})
})

View File

@@ -1,78 +0,0 @@
import { afterEach, describe, expect, test } from "bun:test"
import { AppRuntime } from "../../src/effect/app-runtime"
import { Instance } from "../../src/project/instance"
import { Question } from "../../src/question"
import { Server } from "../../src/server/server"
import { SessionID } from "../../src/session/schema"
import { Log } from "../../src/util/log"
import { tmpdir } from "../fixture/fixture"
Log.init({ print: false })
const ask = (input: { sessionID: SessionID; questions: ReadonlyArray<Question.Info> }) =>
AppRuntime.runPromise(Question.Service.use((svc) => svc.ask(input)))
afterEach(async () => {
await Instance.disposeAll()
})
describe("experimental question httpapi", () => {
test("lists pending questions, replies, and serves docs", async () => {
await using tmp = await tmpdir({ git: true })
const app = Server.Default().app
const headers = {
"content-type": "application/json",
"x-opencode-directory": tmp.path,
}
const questions: ReadonlyArray<Question.Info> = [
{
question: "What would you like to do?",
header: "Action",
options: [
{ label: "Option 1", description: "First option" },
{ label: "Option 2", description: "Second option" },
],
},
]
let pending!: ReturnType<typeof ask>
await Instance.provide({
directory: tmp.path,
fn: async () => {
pending = ask({
sessionID: SessionID.make("ses_test"),
questions,
})
},
})
const list = await app.request("/experimental/httpapi/question", {
headers,
})
expect(list.status).toBe(200)
const items = await list.json()
expect(items).toHaveLength(1)
expect(items[0]).toMatchObject({ questions })
const doc = await app.request("/experimental/httpapi/question/doc", {
headers,
})
expect(doc.status).toBe(200)
const spec = await doc.json()
expect(spec.paths["/experimental/httpapi/question"]?.get?.operationId).toBe("question.list")
expect(spec.paths["/experimental/httpapi/question/{requestID}/reply"]?.post?.operationId).toBe("question.reply")
const reply = await app.request(`/experimental/httpapi/question/${items[0].id}/reply`, {
method: "POST",
headers,
body: JSON.stringify({ answers: [["Option 1"]] }),
})
expect(reply.status).toBe(200)
expect(await reply.json()).toBe(true)
expect(await pending).toEqual([["Option 1"]])
})
})

View File

@@ -26,12 +26,6 @@ async function getModel(providerID: ProviderID, modelID: ModelID) {
)
}
const llm = makeRuntime(LLM.Service, LLM.defaultLayer)
async function drain(input: LLM.StreamInput) {
return llm.runPromise((svc) => svc.stream(input).pipe(Stream.runDrain))
}
describe("session.llm.hasToolCalls", () => {
test("returns false for empty messages array", () => {
expect(LLM.hasToolCalls([])).toBe(false)
@@ -361,16 +355,20 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make(providerID), modelID: resolved.id, variant: "high" },
} satisfies MessageV2.User
await drain({
const stream = await LLM.stream({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
const headers = capture.headers
@@ -395,6 +393,80 @@ describe("session.llm.stream", () => {
})
})
// Regression test: aborting the caller-supplied AbortSignal must cancel the
// provider's streaming HTTP response promptly, not after the stream drains.
test("raw stream abort signal cancels provider response body promptly", async () => {
const server = state.server
if (!server) throw new Error("Server not initialized")
const providerID = "alibaba"
const modelID = "qwen-plus"
// Fixture supplies the canned model/streaming payload for this provider pair.
const fixture = await loadFixture(providerID, modelID)
const model = fixture.model
// NOTE(review): presumably resolves with request/cancel promises once the mock
// server sees a streaming POST to this path — defined elsewhere; verify there.
const pending = waitStreamingRequest("/chat/completions")
// Temp project whose opencode.json points the provider at the local mock server.
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
enabled_providers: [providerID],
provider: {
[providerID]: {
options: {
apiKey: "test-key",
baseURL: `${server.url.origin}/v1`,
},
},
},
}),
)
},
})
await Instance.provide({
directory: tmp.path,
fn: async () => {
const resolved = await getModel(ProviderID.make(providerID), ModelID.make(model.id))
const sessionID = SessionID.make("session-test-raw-abort")
// Permissive agent so the stream call is not blocked on permission prompts.
const agent = {
name: "test",
mode: "primary",
options: {},
permission: [{ permission: "*", pattern: "*", action: "allow" }],
} satisfies Agent.Info
const user = {
id: MessageID.make("user-raw-abort"),
sessionID,
role: "user",
time: { created: Date.now() },
agent: agent.name,
model: { providerID: ProviderID.make(providerID), modelID: resolved.id },
} satisfies MessageV2.User
const ctrl = new AbortController()
const result = await LLM.stream({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: ctrl.signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
// Pull the stream manually so we control exactly when the abort fires.
const iter = result.fullStream[Symbol.asyncIterator]()
// Wait until the provider request has actually started streaming.
await pending.request
// Consume one chunk, then abort mid-stream.
await iter.next()
ctrl.abort()
// Cancellation must reach the mock server within 500ms or the race times out.
await Promise.race([pending.responseCanceled, timeout(500)])
// Request-abort notification is platform-dependent; tolerate its absence.
await Promise.race([pending.requestAborted, timeout(500)]).catch(() => undefined)
// Release the iterator so no dangling reader outlives the test.
await iter.return?.()
},
})
})
test("service stream cancellation cancels provider response body promptly", async () => {
const server = state.server
if (!server) throw new Error("Server not initialized")
@@ -446,7 +518,8 @@ describe("session.llm.stream", () => {
} satisfies MessageV2.User
const ctrl = new AbortController()
const run = llm.runPromiseExit(
const { runPromiseExit } = makeRuntime(LLM.Service, LLM.defaultLayer)
const run = runPromiseExit(
(svc) =>
svc
.stream({
@@ -537,13 +610,14 @@ describe("session.llm.stream", () => {
tools: { question: true },
} satisfies MessageV2.User
await drain({
const stream = await LLM.stream({
user,
sessionID,
model: resolved,
agent,
permission: [{ permission: "question", pattern: "*", action: "allow" }],
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {
question: tool({
@@ -554,6 +628,9 @@ describe("session.llm.stream", () => {
},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const tools = capture.body.tools as Array<{ function?: { name?: string } }> | undefined
expect(tools?.some((item) => item.function?.name === "question")).toBe(true)
@@ -651,16 +728,20 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make("openai"), modelID: resolved.id, variant: "high" },
} satisfies MessageV2.User
await drain({
const stream = await LLM.stream({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
@@ -766,12 +847,13 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make("openai"), modelID: resolved.id },
} satisfies MessageV2.User
await drain({
const stream = await LLM.stream({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [
{
role: "user",
@@ -789,6 +871,9 @@ describe("session.llm.stream", () => {
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
expect(capture.url.pathname.endsWith("/responses")).toBe(true)
},
@@ -887,16 +972,20 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make("minimax"), modelID: ModelID.make("MiniMax-M2.5") },
} satisfies MessageV2.User
await drain({
const stream = await LLM.stream({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
@@ -984,16 +1073,20 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make(providerID), modelID: resolved.id },
} satisfies MessageV2.User
await drain({
const stream = await LLM.stream({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
const config = body.generationConfig as

View File

@@ -0,0 +1,152 @@
import { describe, expect } from "bun:test"
import { Effect, Layer } from "effect"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { AppFileSystem } from "../../src/filesystem"
import { Instance } from "../../src/project/instance"
import { Shell } from "../../src/shell/shell"
import { ShellJob } from "../../src/shell-job"
import { provideTmpdirInstance } from "../fixture/fixture"
import { testEffect } from "../lib/effect"
// Effect test runner wired with the layers ShellJob needs: a cross-spawn process
// spawner plus the app filesystem (ShellJob persists spool/meta files on disk).
const it = testEffect(
Layer.mergeAll(
CrossSpawnSpawner.defaultLayer,
ShellJob.layer.pipe(Layer.provide(CrossSpawnSpawner.defaultLayer), Layer.provide(AppFileSystem.defaultLayer)),
),
)
// Wraps text in double quotes. No escaping: fixtures never embed quote chars.
const quote = (text: string) => ['"', text, '"'].join("")
// Wraps text in single quotes. No escaping: fixtures never embed quote chars.
const squote = (text: string) => ["'", text, "'"].join("")
// Resolves the name of the shell this test run will actually use
// (re-queried on every call so environment changes are picked up).
const shell = () => {
  const candidate = Shell.acceptable()
  return Shell.name(candidate)
}
// Quotes an inline-eval argument for the active shell: cmd needs double
// quotes, every other shell takes single quotes.
const evalarg = (text: string) => {
  if (shell() === "cmd") return quote(text)
  return squote(text)
}
// Builds a portable command line that runs `script` with the current Node
// binary, prefixing PowerShell's call operator where required.
const node = (script: string) => {
  const binary = quote(process.execPath.replaceAll("\\", "/"))
  const command = `${binary} -e ${evalarg(script)}`
  const active = shell()
  return active === "powershell" || active === "pwsh" ? `& ${command}` : command
}
// Reports whether a process with `pid` still exists: signal 0 probes for
// existence without delivering anything, throwing when the pid is gone.
const alive = (pid: number) => {
  try {
    process.kill(pid, 0)
  } catch {
    return false
  }
  return true
}
describe("shell-job", () => {
// Happy path: a completed job exposes its captured output in memory AND
// leaves matching spool (output) and metadata files on disk.
it.live("captures output and persists spool files", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
const jobs = yield* ShellJob.Service
const job = yield* jobs.start({
command: node('process.stdout.write("ok")'),
cwd: dir,
title: "ok",
})
const done = yield* jobs.wait({ id: job.id })
const out = yield* jobs.output({ id: job.id })
expect(done).toBeDefined()
expect(done?.status).toBe("completed")
expect(done?.pid).toBeGreaterThan(0)
// cursor counts bytes consumed: "ok" is 2 bytes.
expect(out).toEqual({ text: "ok", cursor: 2, done: true })
const log = yield* Effect.promise(() => Bun.file(done!.output_path).text())
const meta = yield* Effect.promise(() => Bun.file(done!.meta_path).json())
expect(log).toBe("ok")
expect(meta).toMatchObject({
id: done!.id,
status: "completed",
title: "ok",
cursor: 2,
})
}),
),
)
// Cursor semantics: a second read starting at the first read's cursor must
// return only the bytes written after that point.
it.live("reads output incrementally with a cursor", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
const jobs = yield* ShellJob.Service
// Script emits "a" immediately, "b" at 200ms, exits at 350ms.
const job = yield* jobs.start({
command: node(
'process.stdout.write("a"); setTimeout(() => process.stdout.write("b"), 200); setTimeout(() => process.exit(0), 350)',
),
cwd: dir,
})
// 100ms lands between the two writes, so only "a" is visible yet.
yield* Effect.sleep("100 millis")
const a = yield* jobs.output({ id: job.id })
const done = yield* jobs.wait({ id: job.id })
const b = yield* jobs.output({ id: job.id, cursor: a?.cursor ?? 0 })
expect(a).toEqual({ text: "a", cursor: 1, done: false })
expect(done?.status).toBe("completed")
expect(b).toEqual({ text: "b", cursor: 2, done: true })
}),
),
)
// A non-zero exit code maps to status "failed" while stderr is still captured.
it.live("marks non-zero exits as failed", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
const jobs = yield* ShellJob.Service
const job = yield* jobs.start({
command: node('process.stderr.write("bad"); process.exit(7)'),
cwd: dir,
})
const done = yield* jobs.wait({ id: job.id })
const out = yield* jobs.output({ id: job.id })
expect(done).toBeDefined()
expect(done?.status).toBe("failed")
expect(done?.exit_code).toBe(7)
expect(out?.text).toBe("bad")
expect(out?.done).toBe(true)
}),
),
)
// Killing a job that never exits on its own yields status "killed" and
// no exit code (the process was terminated by signal, not by exiting).
it.live("kills a running job and returns final state", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
const jobs = yield* ShellJob.Service
// setInterval keeps the child alive indefinitely until killed.
const job = yield* jobs.start({
command: node("setInterval(() => {}, 1000)"),
cwd: dir,
})
yield* Effect.sleep("50 millis")
const done = yield* jobs.kill(job.id)
expect(done).toBeDefined()
expect(done?.status).toBe("killed")
expect(done?.exit_code).toBeNull()
}),
),
)
// Lifecycle: disposing the instance must reap any still-running jobs.
// Skipped on Windows, where the pid-liveness probe is unreliable.
it.live("kills running jobs when the instance is disposed", () => {
if (process.platform === "win32") return Effect.void
return provideTmpdirInstance((dir) =>
Effect.gen(function* () {
const jobs = yield* ShellJob.Service
const job = yield* jobs.start({
command: node("setInterval(() => {}, 1000)"),
cwd: dir,
})
expect(job.pid).toBeGreaterThan(0)
yield* Effect.sleep("50 millis")
expect(alive(job.pid!)).toBe(true)
yield* Effect.promise(() => Instance.dispose())
// Brief grace period for the kill signal to land before probing.
yield* Effect.sleep("100 millis")
expect(alive(job.pid!)).toBe(false)
}),
)
})
})

View File

@@ -1,81 +0,0 @@
import { describe, expect } from "bun:test"
import path from "path"
import { Cause, Effect, Exit, Layer } from "effect"
import { GlobTool } from "../../src/tool/glob"
import { SessionID, MessageID } from "../../src/session/schema"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { Ripgrep } from "../../src/file/ripgrep"
import { AppFileSystem } from "../../src/filesystem"
import { Truncate } from "../../src/tool/truncate"
import { Agent } from "../../src/agent/agent"
import { provideTmpdirInstance } from "../fixture/fixture"
import { testEffect } from "../lib/effect"
// Effect test runner with every layer GlobTool resolution needs:
// spawner, filesystem, ripgrep binary, output truncation, and agent config.
const it = testEffect(
Layer.mergeAll(
CrossSpawnSpawner.defaultLayer,
AppFileSystem.defaultLayer,
Ripgrep.defaultLayer,
Truncate.defaultLayer,
Agent.defaultLayer,
),
)
// Minimal tool-execution context stub: placeholder IDs and no-op callbacks so
// tools can run outside a real session.
// NOTE(review): AbortSignal.any([]) yields a signal that can never fire.
const ctx = {
sessionID: SessionID.make("ses_test"),
messageID: MessageID.make(""),
callID: "",
agent: "build",
abort: AbortSignal.any([]),
messages: [],
metadata: () => Effect.void,
ask: () => Effect.void,
}
describe("tool.glob", () => {
// Globbing a directory returns only files matching the pattern, by full path.
it.live("matches files from a directory path", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
// One matching .ts file and one non-matching .txt file.
yield* Effect.promise(() => Bun.write(path.join(dir, "a.ts"), "export const a = 1\n"))
yield* Effect.promise(() => Bun.write(path.join(dir, "b.txt"), "hello\n"))
const info = yield* GlobTool
const glob = yield* info.init()
const result = yield* glob.execute(
{
pattern: "*.ts",
path: dir,
},
ctx,
)
expect(result.metadata.count).toBe(1)
expect(result.output).toContain(path.join(dir, "a.ts"))
expect(result.output).not.toContain(path.join(dir, "b.txt"))
}),
),
)
// Passing a file (not a directory) as the search path must fail with a
// descriptive error rather than silently globbing nothing.
it.live("rejects exact file paths", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
const file = path.join(dir, "a.ts")
yield* Effect.promise(() => Bun.write(file, "export const a = 1\n"))
const info = yield* GlobTool
const glob = yield* info.init()
const exit = yield* glob
.execute(
{
pattern: "*.ts",
path: file,
},
ctx,
)
.pipe(Effect.exit)
expect(Exit.isFailure(exit)).toBe(true)
if (Exit.isFailure(exit)) {
// Squash the Cause down to the underlying error to inspect its message.
const err = Cause.squash(exit.cause)
expect(err instanceof Error ? err.message : String(err)).toContain("glob path must be a directory")
}
}),
),
)
})

View File

@@ -90,25 +90,4 @@ describe("tool.grep", () => {
}),
),
)
// Grep accepts an exact file path as its search target (unlike glob, which
// rejects files): matches are reported with the file path and line number.
it.live("supports exact file paths", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
const file = path.join(dir, "test.txt")
yield* Effect.promise(() => Bun.write(file, "line1\nline2\nline3"))
const info = yield* GrepTool
const grep = yield* info.init()
const result = yield* grep.execute(
{
pattern: "line2",
path: file,
},
ctx,
)
// Exactly one match, attributed to the file and its 1-based line number.
expect(result.metadata.matches).toBe(1)
expect(result.output).toContain(file)
expect(result.output).toContain("Line 2: line2")
}),
),
)
})