Compare commits

..

265 Commits

Author SHA1 Message Date
opencode-agent[bot]
36aadf7356 chore: update nix node_modules hashes 2026-05-15 23:51:35 +00:00
opencode-agent[bot]
1dd835216a Fix beta integration 2026-05-15 23:37:41 +00:00
opencode-agent[bot]
13cbaffb6e Apply PR #27114: Preview native LLM runtime stack 2026-05-15 23:37:13 +00:00
opencode-agent[bot]
f896576d18 Apply PR #26949: perf(app): virtualize session timeline rows 2026-05-15 23:33:19 +00:00
opencode-agent[bot]
41509ad200 Apply PR #26821: core: reduce prompts 2026-05-15 23:33:19 +00:00
opencode-agent[bot]
836bcf21a9 Apply PR #26596: feat(tui): expand pasted summaries on click 2026-05-15 23:32:37 +00:00
opencode-agent[bot]
e4ed15c5be Apply PR #26387: tui: optimistically render submitted prompts 2026-05-15 23:32:37 +00:00
opencode-agent[bot]
940e55883f Apply PR #26246: use keymap state for layer visibility 2026-05-15 23:31:41 +00:00
opencode-agent[bot]
0dd67f5603 Apply PR #12633: feat(tui): add auto-accept mode for permission requests 2026-05-15 23:28:27 +00:00
opencode-agent[bot]
b0f97260b3 Apply PR #11710: feat: Add the ability to include cleared prompts in the history, toggled by a KV-persisted command palette item (resolves #11489) 2026-05-15 23:25:36 +00:00
Dax
09549661e1 Fix npm CLI binary installation (#27801) 2026-05-15 18:43:37 -04:00
opencode-agent[bot]
da495fd2e0 chore: generate 2026-05-15 22:09:43 +00:00
vimtor
85cd447910 chore: reduce alerts noise 2026-05-16 00:07:59 +02:00
Sebastian
0f31fd631b Fix multiline mentions (#27649) 2026-05-15 23:04:20 +02:00
Sebastian
aa07e21945 handle undefined tips (#27635) 2026-05-15 23:04:00 +02:00
Shoubhit Dash
f060874b29 feat(tui): add minimal thinking mode with click-to-expand (#27623) 2026-05-16 02:09:58 +05:30
vimtor
f21c582db9 chore: reduce alerting noise 2026-05-15 22:18:50 +02:00
Shoubhit Dash
65f96a5851 refactor(instance): retire WithInstance adapter (#27782) 2026-05-16 01:30:07 +05:30
Aiden Cline
48122b31cc fix(tool): bridge custom tool zod metadata (#27770) 2026-05-15 14:50:21 -05:00
opencode-agent[bot]
0df2f5b45f chore: generate 2026-05-15 19:48:45 +00:00
Shoubhit Dash
499e8e4b78 test(instance): add effect-native fixture helpers (#27781) 2026-05-16 01:17:37 +05:30
Shoubhit Dash
f33b4455a1 feat(tui): enable pinned session switching (#27780) 2026-05-16 01:10:16 +05:30
Shoubhit Dash
a24abd2b11 refactor(lsp): require explicit instance context (#27767) 2026-05-16 00:46:27 +05:30
opencode-agent[bot]
d44bef2107 chore: generate 2026-05-15 18:45:24 +00:00
Shoubhit Dash
f99339e525 fix(tui): keep session switching pinned-only (#27775) 2026-05-16 00:14:07 +05:30
Shoubhit Dash
2b0e72ab79 refactor(workspace): centralize adapter invocation (#27768) 2026-05-15 23:59:01 +05:30
Shoubhit Dash
2fdee50b3b refactor(acp): extract runtime reentry (#27769) 2026-05-15 23:58:52 +05:30
opencode-agent[bot]
48293c5271 chore: generate 2026-05-15 17:37:07 +00:00
Shoubhit Dash
0c9cfe923f refactor(instance): remove legacy runtime fallback (#27757) 2026-05-15 23:05:44 +05:30
opencode-agent[bot]
9975c1ed1c chore: generate 2026-05-15 15:12:20 +00:00
Aiden Cline
ef7d801271 fix(tool): preserve custom tool arg descriptions (#27750)
Co-authored-by: khimaros <231498+khimaros@users.noreply.github.com>
2026-05-15 10:11:01 -05:00
opencode-agent[bot]
eb630075c3 chore: generate 2026-05-15 15:01:55 +00:00
Shoubhit Dash
a2392ca60d refactor(worktree): provide runtime reentry refs (#27754) 2026-05-15 20:30:29 +05:30
opencode-agent[bot]
f9371eb66c chore: generate 2026-05-15 14:36:04 +00:00
Shoubhit Dash
fa9a2cb24d refactor(instance): remove remaining bind call sites (#27731) 2026-05-15 20:04:42 +05:30
Victor Navarro
2d90f325fc ci: catch provider errors across all opencode tiers (#27495) 2026-05-15 16:31:59 +02:00
Aiden Cline
c2ffd7cf14 fix: markdown table rendering (#27747) 2026-05-15 09:22:24 -05:00
vimtor
104f5d5a14 chore: exclude provider from triggers 2026-05-15 16:01:11 +02:00
Shoubhit Dash
1c7c03332e test(workspace): avoid legacy instance reads (#27727) 2026-05-15 17:07:21 +05:30
opencode-agent[bot]
984eefa6f8 chore: generate 2026-05-15 11:02:14 +00:00
Shoubhit Dash
bf64f8cbb5 refactor(cli): dispose bootstrap instance explicitly (#27721) 2026-05-15 16:30:54 +05:30
opencode-agent[bot]
727a83aa7a chore: generate 2026-05-15 10:46:06 +00:00
Shoubhit Dash
e65383810a refactor(tool): read repo overview directory from instance state (#27717) 2026-05-15 16:14:49 +05:30
Shoubhit Dash
12b666e2c9 refactor(project): import instance context directly (#27714) 2026-05-15 15:59:56 +05:30
Shoubhit Dash
eb5ef1c073 refactor(flags): remove unused flag exports (#27709) 2026-05-15 15:35:24 +05:30
Shoubhit Dash
356f684186 refactor(flags): migrate skip migrations flag (#27705) 2026-05-15 14:54:29 +05:30
Shoubhit Dash
7b370406a9 refactor(flags): migrate lsp download flag (#27699) 2026-05-15 14:35:31 +05:30
Shoubhit Dash
202cc863b4 refactor(flags): migrate claude code prompt flag (#27690) 2026-05-15 14:17:04 +05:30
Shoubhit Dash
22cb0395e2 refactor(flags): migrate external skills flag (#27685) 2026-05-15 13:24:56 +05:30
Shoubhit Dash
2d6bedecd4 refactor(flags): migrate output token max to runtime flags (#27680) 2026-05-15 13:07:35 +05:30
opencode-agent[bot]
2080390ca6 chore: generate 2026-05-15 07:36:10 +00:00
Kagura
1ac3f09468 fix(watcher): resolve symlinked .git path before subscribing (#27016)
Co-authored-by: Simon Klee <hello@simonklee.dk>
2026-05-15 09:34:53 +02:00
Aiden Cline
ca8f578f2f ci: skip previously cleaned PRs (#27670) 2026-05-15 00:23:09 -05:00
Aiden Cline
d59d99665b ci: Automate PR cleanup (#27667) 2026-05-14 23:47:59 -05:00
opencode
c43edc5b71 sync release versions for v1.15.0 2026-05-15 04:03:54 +00:00
Dax Raad
7a012cac08 fix(tool): ignore invalid custom tool exports 2026-05-14 23:36:28 -04:00
James Long
af06e52708 fix(session): ignore instruction lookup errors (#27656) 2026-05-14 23:12:30 -04:00
Dax Raad
f807152724 core: fix event projector lookup to use versioned type keys
Fixes a bug where projectors were stored with Definition object references
as Map keys but lookups were failing due to object identity mismatches.
Now uses versioned type strings as keys, ensuring events are correctly
matched to their projectors during sync processing.

This fixes issues where certain events would not be properly projected
to the read model, causing stale or missing data in views.
2026-05-14 22:52:03 -04:00
Dax Raad
b0ea7a5aa7 more test fixes 2026-05-14 22:45:09 -04:00
opencode-agent[bot]
34b1be5bcd chore: generate 2026-05-15 02:34:06 +00:00
Dax Raad
16639eee76 test fixes 2026-05-14 22:32:38 -04:00
opencode-agent[bot]
10c8493940 chore: generate 2026-05-15 02:14:56 +00:00
Dax Raad
a50ff72f3f fix: remove debug logging 2026-05-14 22:13:15 -04:00
LukeParkerDev
8d08f68072 Merge remote-tracking branch 'upstream/dev' into perf/session-timeline-virtua 2026-05-15 12:07:32 +10:00
Dax Raad
5f4e5e6896 more typecheck fixes 2026-05-14 22:05:12 -04:00
Dax Raad
f39cf911d7 fix missing event types in sdk 2026-05-14 21:57:25 -04:00
Brendan Allan
f179dcbf02 fix(app): only run session.updated archive logic if archive state changes (#27637) 2026-05-15 09:42:47 +08:00
LukeParkerDev
e8355b45bf Revert "fix(app): use legacy SDK for global events"
This reverts commit e22ede26ee.
2026-05-15 11:36:00 +10:00
Kit Langton
4c7692ed30 feat(llm): add native runtime preview 2026-05-14 21:33:39 -04:00
LukeParkerDev
e22ede26ee fix(app): use legacy SDK for global events 2026-05-15 11:17:54 +10:00
LukeParkerDev
434a13b582 Merge remote-tracking branch 'upstream/dev' into perf/session-timeline-virtua 2026-05-15 11:00:20 +10:00
opencode-agent[bot]
fd6a8520b8 chore: generate 2026-05-15 00:52:06 +00:00
Dax
e11e089e42 Add Effect-native core event system (#27415) 2026-05-15 00:50:23 +00:00
Luna Seemann
73cdba959b feat(desktop): auto-hide menu bar on Linux and Windows (#27618) 2026-05-15 08:48:29 +08:00
LukeParkerDev
8d75ad395d fix(app): keep timeline anchored during resize 2026-05-15 10:46:05 +10:00
opencode
4e7a60dac6 sync release versions for v1.14.51 2026-05-15 00:39:54 +00:00
LukeParkerDev
e9a99c8ef3 fix(app): keep streaming timeline anchored 2026-05-15 10:08:36 +10:00
opencode-agent[bot]
e62ebd8fec chore: generate 2026-05-15 00:02:04 +00:00
Kit Langton
195f592640 refactor(server): simplify listener lifecycle (#27413) 2026-05-15 00:00:52 +00:00
opencode-agent[bot]
78769010a1 chore: generate 2026-05-14 23:46:57 +00:00
Kit Langton
4e143e3a3e test(lib): promote pollWithTimeout/awaitWithTimeout helpers (#27626) 2026-05-14 23:45:32 +00:00
opencode-agent[bot]
dab567aa2d chore: generate 2026-05-14 23:34:42 +00:00
Kit Langton
9d35b04e13 test(acp): replace fixed sleeps with pollUntil in event-subscription (#27624) 2026-05-14 23:33:06 +00:00
Kit Langton
273ab56949 test(bus): fix flaky subscriber races with readiness latch (#27625) 2026-05-14 19:32:25 -04:00
Kit Langton
302ba0ca0b test(session): de-flake shell-cancel tests by waiting for busy state (#27622) 2026-05-14 19:24:09 -04:00
LukeParkerDev
9b59d5fb31 fix(app): preserve timeline row state 2026-05-15 08:57:17 +10:00
Shoubhit Dash
d35e09f1fc test(workspace): use runtime flags in workspace tests (#27612) 2026-05-15 04:19:39 +05:30
Shoubhit Dash
fc34c74567 refactor(flags): move channel db flag to runtime flags (#27615) 2026-05-15 04:09:10 +05:30
Shoubhit Dash
cb4f5cdea9 refactor(flags): move auto share to runtime flags (#27611) 2026-05-15 03:58:26 +05:30
nv-kasikritc
d34a0194ec feat(provider): add NVIDIA endpoints origin header (#27394) 2026-05-14 17:21:58 -05:00
Shoubhit Dash
43310f4d8c refactor(flags): move embedded web ui flag to runtime flags (#27613) 2026-05-15 03:51:29 +05:30
Shoubhit Dash
e22cfa435a refactor(lsp): move ty flag to runtime flags (#27610) 2026-05-15 03:40:30 +05:30
opencode-agent[bot]
93b1ccc029 chore: generate 2026-05-14 22:00:48 +00:00
Shoubhit Dash
faca2b90c1 refactor(flags): migrate icon discovery runtime flag (#27609) 2026-05-15 03:24:14 +05:30
Shoubhit Dash
76ff18afde refactor(format): move oxfmt flag to runtime flags (#27608) 2026-05-15 03:03:37 +05:30
opencode-agent[bot]
9914c9af17 chore: generate 2026-05-14 21:32:52 +00:00
Shoubhit Dash
f202226bbc refactor(flags): move bash timeout to runtime flags (#27607) 2026-05-15 02:49:14 +05:30
Shoubhit Dash
34198f422c refactor(provider): use runtime flag for experimental models (#27606) 2026-05-15 02:48:01 +05:30
Shoubhit Dash
cccdeef294 refactor(flags): migrate claude code skills flag to RuntimeFlags (#27605) 2026-05-15 02:47:26 +05:30
Musa
83c145f889 fix(plugin): scope digitalocean oauth to genai (#27599) 2026-05-14 15:20:34 -05:00
Kit Langton
d353a6bc24 fix(worktree): accept missing create payload (#27582) 2026-05-14 14:25:22 -04:00
bo-tato
d25cc42d21 docs(app): stale reference to removed multi-edit tool (#27579) 2026-05-14 13:10:01 -05:00
opencode-agent[bot]
6039b894c5 chore: generate 2026-05-14 18:05:32 +00:00
Kit Langton
b4fc5ef071 refactor(http-recorder): tighten cassette safety, fix WS leaks + docs (#26730) 2026-05-14 18:03:22 +00:00
opencode-agent[bot]
f6c8e35383 chore: generate 2026-05-14 17:58:35 +00:00
Kit Langton
94564f3588 fix(session): prevent double auto-compaction from filterCompacted reorder (#27545) 2026-05-14 13:56:12 -04:00
Kit Langton
855bda8384 test(question): wait on question events (#27124) 2026-05-14 13:23:47 -04:00
opencode-agent[bot]
756488d534 chore: generate 2026-05-14 16:42:18 +00:00
Shoubhit Dash
22de34c4de feat: add experimental background subagents (#27084) 2026-05-14 22:10:15 +05:30
Adam
bdb0c16a93 chore: update web stats 2026-05-14 11:26:59 -05:00
Sameer Kankute
7f7eb2e7f8 fix(provider): remove LiteLLM workarounds ported upstream, requires LiteLLM v1.85.0-rc.2+ (#26819)
Co-authored-by: Cursor <cursoragent@cursor.com>
2026-05-14 11:26:07 -05:00
opencode-agent[bot]
e15fd0bb93 chore: generate 2026-05-14 13:33:44 +00:00
opencode-agent[bot]
8f90697df8 chore: generate 2026-05-14 13:32:19 +00:00
opencode-agent[bot]
17af25d1c1 chore: generate 2026-05-14 13:30:59 +00:00
Kit Langton
3c81326a5e docs(effect): refresh TODO with shipped P0 and RF work (#27536) 2026-05-14 09:29:32 -04:00
opencode-agent[bot]
9f8d8f5b0e chore: generate 2026-05-14 12:40:10 +00:00
OpeOginni
337993d53e feat(desktop): add mcp client registration status and authentication handling (#27525) 2026-05-14 20:38:52 +08:00
Shoubhit Dash
e26abd8da9 fix(tool): close shell truncation stream (#27517) 2026-05-14 16:34:42 +05:30
Brendan Allan
e1ed51b7fc cleanup MessageTimeline
- Separate message timeline data handling into .data.ts file
- Use effect's Data and Equal modules for managing Timeline parts
- Handle mobileChanges view outside of MessageTimeline
2026-05-14 18:40:05 +08:00
Shoubhit Dash
8c1ce0b80c refactor(flags): simplify tui plugin runtime flags (#27506) 2026-05-14 15:56:02 +05:30
Brendan Allan
f8c3f560d4 fix(desktop): await execFilePromise and read stdout properly (#27499) 2026-05-14 17:52:23 +08:00
Shoubhit Dash
7e43d3e3f5 refactor(lsp): type initialize errors (#27494) 2026-05-14 15:20:02 +05:30
opencode-agent[bot]
52db7a76e2 chore: update nix node_modules hashes 2026-05-14 09:28:30 +00:00
Shoubhit Dash
be6e7b309e refactor(provider): type init errors (#27484) 2026-05-14 14:48:58 +05:30
Simon Klee
0af242974c deps: Upgrade OpenTUI to 0.2.10 (#27491) 2026-05-14 09:14:03 +00:00
Shoubhit Dash
27ac53aaac fix(server): stop exposing named defects (#27471) 2026-05-14 12:51:05 +05:30
Shoubhit Dash
78015571bf refactor(server): centralize session busy mapping (#27473) 2026-05-14 12:50:36 +05:30
Aiden Cline
e76cf967e6 fix(session): finalize interrupted assistant messages (#27254) 2026-05-14 01:19:11 -05:00
opencode-agent[bot]
c2723b5ea0 chore: generate 2026-05-14 04:25:32 +00:00
Frederik
9675579796 fix: bug encountered when using azure gpt-5.5 w/ completions api (#26222) 2026-05-13 23:24:16 -05:00
opencode-agent[bot]
4d8368970a chore: update nix node_modules hashes 2026-05-14 04:20:42 +00:00
Nikhil Patel
2a7af6acd8 fix(tui): preserve text selection on question prompt options (#24988)
Co-authored-by: Aiden Cline <aidenpcline@gmail.com>
2026-05-13 23:17:59 -05:00
opencode-agent[bot]
bfd707abc9 chore: generate 2026-05-14 04:07:30 +00:00
Aiden Cline
981e00971a fix: image resizer wasm loading, reenable image resizing (#26805) 2026-05-13 23:06:07 -05:00
Aiden Cline
c50d2b3656 Refactor event HTTP API route modules (#27441) 2026-05-13 22:41:17 -05:00
opencode
ddad0988e7 sync release versions for v1.14.50 2026-05-14 03:03:23 +00:00
Kit Langton
cda8cc7285 test(httpapi): simplify event stream regression coverage (#27427) 2026-05-14 02:18:40 +00:00
Kit Langton
b928a1fff9 fix(httpapi): preserve event stream context (#27425)
Co-authored-by: Aiden Cline <aidenpcline@gmail.com>
Co-authored-by: James Long <longster@gmail.com>
2026-05-13 22:02:30 -04:00
Kit Langton
04286d0415 docs(effect): plan Instance deletion path (#27424) 2026-05-13 21:58:54 -04:00
opencode-agent[bot]
33bb33ba90 chore: generate 2026-05-14 01:55:00 +00:00
Sebastian
b0ade40265 flip back to markdown renderable (#27421) 2026-05-14 03:53:48 +02:00
Kit Langton
681594b551 refactor(storage): remove not found wire serializer (#27416) 2026-05-13 21:39:50 -04:00
Kit Langton
edf7649400 fix(session): type busy errors (#27410) 2026-05-14 01:28:04 +00:00
Kit Langton
3fc7486d15 test(session): fix shell-cancel race when trap hasn't installed yet (#27408) 2026-05-14 01:10:40 +00:00
Kit Langton
8e353584c7 test(format): remove formatter check sleeps (#27407) 2026-05-13 21:02:34 -04:00
Sebastian
5c35ea2181 notification docs (#27406) 2026-05-14 03:01:25 +02:00
opencode-agent[bot]
faf8713053 chore: generate 2026-05-14 00:59:33 +00:00
Dax
16c457e712 refactor(core): move models.dev into core (#27347) 2026-05-13 20:58:24 -04:00
Kit Langton
9818c9e8d0 fix(provider): make small model fallback optional (#27405) 2026-05-14 00:44:34 +00:00
Kit Langton
5e41dbbcbf test(effect): use Effect sleep in instance state tests (#27404) 2026-05-13 20:43:46 -04:00
Kit Langton
ba5c8d3822 fix(llm): preserve tool error defects (#27403) 2026-05-13 20:43:32 -04:00
opencode-agent[bot]
10c90eb445 chore: generate 2026-05-14 00:33:32 +00:00
Kit Langton
aa8a41d1b8 effect(patch,tool): migrate patch/index and tool/read to AppFileSystem (#27155) 2026-05-13 20:32:19 -04:00
Kit Langton
3f33be1928 effect(server): typed errors in session/sync handlers, fix concurrency (#27146) 2026-05-13 20:31:27 -04:00
Kit Langton
42e6b7d541 effect(core): track stderr truncation; polish AppProcess callers (#27353) 2026-05-13 20:31:03 -04:00
LukeParkerDev
ade6287241 Merge remote-tracking branch 'origin/perf/session-timeline-virtua' into perf/session-timeline-virtua 2026-05-14 10:28:43 +10:00
LukeParkerDev
4a72af3ed7 fix(app): preserve virtual timeline row state 2026-05-14 10:27:10 +10:00
Kit Langton
ccb207f946 effect(util): migrate filesystem callers to AppFileSystem.Service (#27152) 2026-05-13 20:25:37 -04:00
LukeParkerDev
3fd1bddca1 fix: defer timeline diff rendering 2026-05-14 09:59:28 +10:00
Kit Langton
de1e0b5d6d test(workspace): effectify sync state cases (#27400) 2026-05-13 19:49:03 -04:00
Aiden Cline
df3895d74f cleanup: make smallOptions rely on variants (#27390) 2026-05-13 18:30:36 -05:00
Kit Langton
55e0af1405 fix(provider): type model not found errors (#27334) 2026-05-13 22:23:09 +00:00
Kit Langton
5182a3698d test(workspace): use Effect for local session warp cases (#27393) 2026-05-13 22:22:51 +00:00
opencode
73e1de4513 sync release versions for v1.14.49 2026-05-13 22:18:40 +00:00
Frank
44b432c3fd sync 2026-05-13 18:05:06 -04:00
Kit Langton
0d8c9f3437 docs: add LayerMap example (#27388) 2026-05-13 17:46:47 -04:00
LukeParkerDev
0d57ecd879 Merge remote-tracking branch 'origin/perf/session-timeline-virtua' into perf/session-timeline-virtua 2026-05-14 07:42:09 +10:00
LukeParkerDev
f1dba7a9b8 Merge remote-tracking branch 'upstream/dev' into perf/session-timeline-virtua 2026-05-14 07:39:37 +10:00
opencode-agent[bot]
0d074492df chore: update nix node_modules hashes 2026-05-13 21:20:10 +00:00
Sebastian
3b7a5e783d fix keymap fallback priority and TUI config diagnostics (#27384) 2026-05-13 23:00:48 +02:00
Frank
c197fd92b7 sync 2026-05-13 15:58:34 -04:00
Shoubhit Dash
9ee1f6ceba fix(server): map busy sessions in http handlers (#27375) 2026-05-14 01:02:07 +05:30
Shoubhit Dash
20cec91550 fix(provider): restore model suggestions (#27372) 2026-05-14 00:38:38 +05:30
Aiden Cline
22a5e6cc50 fix(run): restore non-interactive exit behavior (#27371) 2026-05-13 18:45:34 +00:00
Shoubhit Dash
52f9bcbb82 refactor(flags): route installation client through runtime flags (#27369) 2026-05-14 00:10:31 +05:30
Shoubhit Dash
a4ebb07c25 refactor(flags): route llm client through runtime flags (#27368) 2026-05-14 00:09:53 +05:30
opencode-agent[bot]
7cc968b05d chore: generate 2026-05-13 16:47:21 +00:00
Frank
fa077b92b1 zen: update sticky session logic 2026-05-13 12:45:11 -04:00
Kit Langton
8ad3a4b217 test(util): migrate log cleanup test to Effect (#27357) 2026-05-13 16:43:23 +00:00
Kit Langton
533495ae20 test(mcp): migrate OAuth auto-connect tests (#27356) 2026-05-13 16:38:37 +00:00
Kit Langton
f0635e365f test(session): use Effect polling in processor tests (#27354) 2026-05-13 16:33:19 +00:00
Kit Langton
25de3e407b test(acp): use shared instance fixture for event tests (#27351) 2026-05-13 12:30:13 -04:00
Frank
655b25bccf sync 2026-05-13 12:05:30 -04:00
Kit Langton
e5d13d9519 effect(git): migrate to AppProcess.run (#27190) 2026-05-13 12:04:51 -04:00
Kit Langton
5cdbb7505e effect(installation): migrate to AppProcess.run (#27188) 2026-05-13 11:54:29 -04:00
Kit Langton
e5319846ad test(server): migrate pty websocket input test (#27348) 2026-05-13 15:43:09 +00:00
Kit Langton
832aa94977 effect(worktree): migrate to AppProcess.run (#27186) 2026-05-13 11:39:35 -04:00
Luke Parker
fba26d1f9c Merge pull request #4 from Hona/fix/virtual-timeline-anchor-1778643000
fix(app): anchor virtual timeline to bottom
2026-05-13 13:57:59 +10:00
LukeParkerDev
44c0ec7847 fix(app): simplify timeline bottom anchor 2026-05-13 13:54:07 +10:00
LukeParkerDev
d4db264a1a fix(app): anchor virtual timeline to bottom 2026-05-13 13:43:45 +10:00
Luke Parker
5c0d9ed030 Merge pull request #3 from Hona/fix/virtual-timeline-cache-1778638381
fix(app): restore virtual timeline cache
2026-05-13 13:10:36 +10:00
LukeParkerDev
43234b8b0c fix(app): restore virtual timeline cache 2026-05-13 13:08:18 +10:00
LukeParkerDev
6fdb256ac2 Merge remote-tracking branch 'upstream/dev' into perf/session-timeline-virtua 2026-05-13 12:14:19 +10:00
LukeParkerDev
861fe245c8 fix(app): restore virtual timeline scrollbar 2026-05-13 10:29:26 +10:00
LukeParkerDev
12c6c0925f fix(app): guard virtualizer scroll root 2026-05-13 10:16:03 +10:00
LukeParkerDev
240201b139 fix(app): align virtual timeline scrolling 2026-05-12 15:43:48 +10:00
LukeParkerDev
dcbe29c7c6 fix(app): stabilize session timeline virtualization 2026-05-12 15:09:25 +10:00
LukeParkerDev
bd14ab0174 fix(app): keep virtual timeline rows reactive 2026-05-12 09:54:24 +10:00
LukeParkerDev
f59b41f1e0 perf(app): improve timeline reactivity architecture using mapArray 2026-05-12 09:44:01 +10:00
LukeParkerDev
7ab318a7a5 perf(app): virtualize session timeline rows 2026-05-12 08:57:21 +10:00
Ariane Emory
9332560ffd Merge branch 'dev' into feat/canceled-prompts-in-history 2026-05-11 15:49:47 -04:00
Aiden Cline
21bc8ac444 rm example 2026-05-11 02:26:22 -05:00
Aiden Cline
02d9727bbf reduce shell 2026-05-11 02:25:52 -05:00
Aiden Cline
5569e85734 rm dup prompt 2026-05-11 01:59:11 -05:00
Aiden Cline
daef20230c Optimize task tool prompt 2026-05-11 01:22:19 -05:00
Aiden Cline
2f7ed2f036 chore(opencode): use ASCII punctuation in todowrite prompt
Replaces em dashes and unicode arrows with plain ASCII ('-' and '->')
to match the repo's default style.
2026-05-11 01:21:06 -05:00
Aiden Cline
1327f85793 chore(opencode): compress todowrite tool prompt
Rewrites the TodoWrite tool description to reduce token usage by ~75%
(1,380 -> 337 words) while preserving all behavioral guardrails.

Trimmed:
- Eight full example blocks (4 use / 4 skip) with reasoning paragraphs
  reduced to one-line example summaries.
- Redundant framing prose and duplicated rules.

Preserved:
- All use / skip trigger conditions.
- Task states (pending, in_progress, completed, cancelled) and the
  one-in_progress-at-a-time invariant.
- Anti-hallucination rule: only mark completed after the work is
  actually done (including any required verification).
- Follow-up handling for both blocked and successfully completed tasks.

Clarified:
- 'Distinct steps or actions (not just 3 tool calls for a single
  conceptual step)' avoids triggering todos for simple multi-grep
  lookups.
- Preserve user-provided commands verbatim (flags, args, order).
2026-05-11 01:13:23 -05:00
Ryan Vogel
5aad254f68 feat(tui): expand pasted summaries on click 2026-05-09 23:11:26 +00:00
Kit Langton
ecb6457ba4 test: cover TUI optimistic prompt sync 2026-05-08 14:28:00 -04:00
Kit Langton
de800a90e3 tui: simplify optimistic prompt reconciliation 2026-05-08 14:12:55 -04:00
Kit Langton
8e1d9512f1 tui: optimistically render submitted prompts 2026-05-08 13:53:17 -04:00
Sebastian Herrlinger
29b7871552 structure 2026-05-07 22:42:45 +02:00
Sebastian Herrlinger
968b6962bd move command palette 2026-05-07 22:35:18 +02:00
Sebastian Herrlinger
bb68dcb637 STASH 2 2026-05-07 22:23:48 +02:00
Sebastian Herrlinger
8a8c6301f1 STASH 2026-05-07 22:23:48 +02:00
Ariane Emory
4e8020b503 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-05-04 06:26:02 -04:00
Ariane Emory
09e4e5a184 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-23 21:55:13 -04:00
Ariane Emory
731c1e58f2 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-16 20:22:02 -04:00
Ariane Emory
c411d37484 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-12 04:22:06 -04:00
Ariane Emory
97a94571a4 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-03 09:19:12 -04:00
Ariane Emory
6652585a7f Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-24 11:17:40 -04:00
Ariane Emory
532b64c0d5 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-24 07:43:03 -04:00
Ariane Emory
eec4c775a7 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-23 21:10:21 -04:00
Ariane Emory
01e350449c Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-20 19:12:18 -04:00
Dax
5792a80a8c Merge branch 'dev' into feat/auto-accept-permissions 2026-03-20 10:46:31 -04:00
Dax Raad
db039db7f5 regen js sdk 2026-03-20 10:21:10 -04:00
Dax Raad
c1a3936b61 Merge remote-tracking branch 'origin/dev' into feat/auto-accept-permissions
# Conflicts:
#	packages/sdk/js/src/v2/gen/types.gen.ts
2026-03-20 10:20:26 -04:00
Ariane Emory
a9d9e4d9c4 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-20 03:35:16 -04:00
Ariane Emory
2531b2d3a9 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-13 11:47:39 -04:00
Ariane Emory
a718f86e0f Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-08 19:28:41 -04:00
Ariane Emory
f3efdff861 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-08 08:36:02 -04:00
Ariane Emory
955d8591df Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-05 18:24:19 -05:00
Ariane Emory
33b3388bf4 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-26 17:50:11 -05:00
Ariane Emory
716f40b128 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-26 01:36:39 -05:00
Ariane Emory
0b06ff1407 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-20 21:24:12 -05:00
Ariane Emory
01ff5b5390 Merge branch 'dev' into feat/canceled-prompts-in-history
# Conflicts:
#	packages/opencode/src/cli/cmd/tui/component/prompt/history.tsx
2026-02-20 02:16:02 -05:00
Ariane Emory
3d1b121e70 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-19 19:18:48 -05:00
Ariane Emory
b70629af27 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-18 19:10:26 -05:00
Ariane Emory
b7b016fa28 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-17 00:09:51 -05:00
Ariane Emory
5ba2d7e5f0 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-15 12:27:51 -05:00
Ariane Emory
459b22b83d Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-14 19:21:47 -05:00
Ariane Emory
377812b98a Merge dev into feat/canceled-prompts-in-history 2026-02-14 06:28:48 -05:00
Ariane Emory
5cc0901e38 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-13 09:37:11 -05:00
Ariane Emory
7fb6b589d1 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-12 18:29:23 -05:00
Ariane Emory
3f37b43e7d Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-11 12:46:47 -05:00
Ariane Emory
8805dfc849 fix: deduplicate prompt history entries
Avoid adding duplicate entries to prompt history when the same input
is appended multiple times (e.g., clearing with ctrl+c then restoring
via history navigation and clearing again).
2026-02-10 22:21:39 -05:00
Ariane Emory
ac5a5d8b16 Merge branch 'feat/canceled-prompts-in-history' of github.com:ariane-emory/opencode into feat/canceled-prompts-in-history 2026-02-10 16:37:55 -05:00
Ariane Emory
eaf94ed047 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-10 16:29:05 -05:00
Ariane Emory
b8031c5ae8 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-10 16:10:35 -05:00
Dax Raad
a531f3f36d core: run command build agent now auto-accepts file edits to reduce workflow interruptions while still requiring confirmation for bash commands 2026-02-07 20:00:09 -05:00
Dax Raad
bb3382311d tui: standardize autoedit indicator text styling to match other status labels 2026-02-07 19:57:45 -05:00
Dax Raad
ad545d0cc9 tui: allow auto-accepting only edit permissions instead of all permissions 2026-02-07 19:52:53 -05:00
Dax Raad
ac244b1458 tui: add searchable 'toggle' keywords to command palette and show current state in toggle titles 2026-02-07 17:03:34 -05:00
Dax Raad
f202536b65 tui: show enable/disable state in permission toggle and make it searchable by 'toggle permissions' 2026-02-07 16:57:48 -05:00
Dax Raad
405cc3f610 tui: streamline permission toggle command naming and add keyboard shortcut support
Rename 'Toggle autoaccept permissions' to 'Toggle permissions' for clarity
and move the command to the Agent category for better discoverability.
Add permission_auto_accept_toggle keybind to enable keyboard shortcut
toggling of auto-accept mode for permission requests.
2026-02-07 16:51:55 -05:00
Dax Raad
878c1b8c2d feat(tui): add auto-accept mode for permission requests
Add a toggleable auto-accept mode that automatically accepts all incoming
permission requests with a 'once' reply. This is useful for users who want
to streamline their workflow when they trust the agent's actions.

Changes:
- Add permission_auto_accept keybind (default: shift+tab) to config
- Remove default for agent_cycle_reverse (was shift+tab)
- Add auto-accept logic in sync.tsx to auto-reply when enabled
- Add command bar action to toggle auto-accept mode (copy: "Toggle autoaccept permissions")
- Add visual indicator showing 'auto-accept' when active
- Store auto-accept state in KV for persistence across sessions
2026-02-07 16:44:39 -05:00
Ariane Emory
d5dcadc000 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-07 13:34:42 -05:00
Ariane Emory
0c154e6a2f Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-06 15:59:50 -05:00
Ariane Emory
4f96975148 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-05 18:17:01 -05:00
Ariane Emory
eaba99711b Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-04 19:33:59 -05:00
Ariane Emory
f762125775 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-03 18:36:44 -05:00
Ariane Emory
ded6bb6513 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-02 21:23:28 -05:00
Ariane Emory
39332f5be6 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-01 22:33:29 -05:00
Ariane Emory
2c6ff35400 feat: add toggle to control whether cleared prompts are saved to history
Adds a toggle command in the System category that allows users to enable
or disable saving cleared prompts to history. The feature is disabled by
default to preserve existing behavior.

When enabled via the command palette ("Include cleared prompts in history"),
pressing Ctrl+C will save the current prompt to history before clearing it,
allowing users to navigate back with arrow keys.

The setting persists in kv.json.
2026-02-01 21:12:48 -05:00
Ariane Emory
738d6c8899 feat: save prompt to history when cleared with Ctrl+C
When users press Ctrl+C to clear the input field, the current prompt
is now saved to history before clearing. This allows users to navigate
back to cleared prompts using arrow keys, preventing loss of work.

Addresses #11489
2026-02-01 21:01:15 -05:00
410 changed files with 18743 additions and 7775 deletions

50
.github/workflows/close-prs.yml vendored Normal file
View File

@@ -0,0 +1,50 @@
name: close-prs
on:
schedule:
- cron: "0 22 * * *" # Daily at 10:00 PM UTC
workflow_dispatch:
inputs:
dry-run:
description: "Log matching PRs without closing them"
type: boolean
default: true
max-close:
description: "Maximum matching PRs to close"
type: string
required: false
default: "50"
jobs:
close:
runs-on: ubuntu-latest
timeout-minutes: 240
permissions:
contents: read
issues: write
pull-requests: write
steps:
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1
- uses: oven-sh/setup-bun@0c5077e51419868618aeaa5fe8019c62421857d6 # v2.2.0
with:
bun-version: latest
- name: Close old PRs without enough positive reactions
env:
GITHUB_TOKEN: ${{ github.token }}
run: |
max_close="${{ inputs['max-close'] }}"
if [ -z "$max_close" ]; then
max_close="50"
fi
args=("--threshold" "2" "--age-months" "1" "--sleep-ms" "20000" "--max-close" "$max_close")
if [ "${{ github.event_name }}" = "schedule" ]; then
args+=("--execute")
elif [ "${{ inputs['dry-run'] }}" = "false" ]; then
args+=("--execute")
fi
bun script/github/close-prs.ts "${args[@]}"

View File

@@ -1,235 +0,0 @@
name: close-stale-prs
on:
workflow_dispatch:
inputs:
dryRun:
description: "Log actions without closing PRs"
type: boolean
default: false
schedule:
- cron: "0 6 * * *"
permissions:
contents: read
issues: write
pull-requests: write
jobs:
close-stale-prs:
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Close inactive PRs
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const DAYS_INACTIVE = 60
const MAX_RETRIES = 3
// Adaptive delay: fast for small batches, slower for large to respect
// GitHub's 80 content-generating requests/minute limit
const SMALL_BATCH_THRESHOLD = 10
const SMALL_BATCH_DELAY_MS = 1000 // 1s for daily operations (≤10 PRs)
const LARGE_BATCH_DELAY_MS = 2000 // 2s for backlog (>10 PRs) = ~30 ops/min, well under 80 limit
const startTime = Date.now()
const cutoff = new Date(Date.now() - DAYS_INACTIVE * 24 * 60 * 60 * 1000)
const { owner, repo } = context.repo
const dryRun = context.payload.inputs?.dryRun === "true"
core.info(`Dry run mode: ${dryRun}`)
core.info(`Cutoff date: ${cutoff.toISOString()}`)
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms))
}
async function withRetry(fn, description = 'API call') {
let lastError
for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
try {
const result = await fn()
return result
} catch (error) {
lastError = error
const isRateLimited = error.status === 403 &&
(error.message?.includes('rate limit') || error.message?.includes('secondary'))
if (!isRateLimited) {
throw error
}
// Parse retry-after header, default to 60 seconds
const retryAfter = error.response?.headers?.['retry-after']
? parseInt(error.response.headers['retry-after'])
: 60
// Exponential backoff: retryAfter * 2^attempt
const backoffMs = retryAfter * 1000 * Math.pow(2, attempt)
core.warning(`${description}: Rate limited (attempt ${attempt + 1}/${MAX_RETRIES}). Waiting ${backoffMs / 1000}s before retry...`)
await sleep(backoffMs)
}
}
core.error(`${description}: Max retries (${MAX_RETRIES}) exceeded`)
throw lastError
}
const query = `
query($owner: String!, $repo: String!, $cursor: String) {
repository(owner: $owner, name: $repo) {
pullRequests(first: 100, states: OPEN, after: $cursor) {
pageInfo {
hasNextPage
endCursor
}
nodes {
number
title
author {
login
}
createdAt
commits(last: 1) {
nodes {
commit {
committedDate
}
}
}
comments(last: 1) {
nodes {
createdAt
}
}
reviews(last: 1) {
nodes {
createdAt
}
}
}
}
}
}
`
const allPrs = []
let cursor = null
let hasNextPage = true
let pageCount = 0
while (hasNextPage) {
pageCount++
core.info(`Fetching page ${pageCount} of open PRs...`)
const result = await withRetry(
() => github.graphql(query, { owner, repo, cursor }),
`GraphQL page ${pageCount}`
)
allPrs.push(...result.repository.pullRequests.nodes)
hasNextPage = result.repository.pullRequests.pageInfo.hasNextPage
cursor = result.repository.pullRequests.pageInfo.endCursor
core.info(`Page ${pageCount}: fetched ${result.repository.pullRequests.nodes.length} PRs (total: ${allPrs.length})`)
// Delay between pagination requests (use small batch delay for reads)
if (hasNextPage) {
await sleep(SMALL_BATCH_DELAY_MS)
}
}
core.info(`Found ${allPrs.length} open pull requests`)
const stalePrs = allPrs.filter((pr) => {
const dates = [
new Date(pr.createdAt),
pr.commits.nodes[0] ? new Date(pr.commits.nodes[0].commit.committedDate) : null,
pr.comments.nodes[0] ? new Date(pr.comments.nodes[0].createdAt) : null,
pr.reviews.nodes[0] ? new Date(pr.reviews.nodes[0].createdAt) : null,
].filter((d) => d !== null)
const lastActivity = dates.sort((a, b) => b.getTime() - a.getTime())[0]
if (!lastActivity || lastActivity > cutoff) {
core.info(`PR #${pr.number} is fresh (last activity: ${lastActivity?.toISOString() || "unknown"})`)
return false
}
core.info(`PR #${pr.number} is STALE (last activity: ${lastActivity.toISOString()})`)
return true
})
if (!stalePrs.length) {
core.info("No stale pull requests found.")
return
}
core.info(`Found ${stalePrs.length} stale pull requests`)
// ============================================
// Close stale PRs
// ============================================
const requestDelayMs = stalePrs.length > SMALL_BATCH_THRESHOLD
? LARGE_BATCH_DELAY_MS
: SMALL_BATCH_DELAY_MS
core.info(`Using ${requestDelayMs}ms delay between operations (${stalePrs.length > SMALL_BATCH_THRESHOLD ? 'large' : 'small'} batch mode)`)
let closedCount = 0
let skippedCount = 0
for (const pr of stalePrs) {
const issue_number = pr.number
const closeComment = `Closing this pull request because it has had no updates for more than ${DAYS_INACTIVE} days. If you plan to continue working on it, feel free to reopen or open a new PR.`
if (dryRun) {
core.info(`[dry-run] Would close PR #${issue_number} from ${pr.author?.login || 'unknown'}: ${pr.title}`)
continue
}
try {
// Add comment
await withRetry(
() => github.rest.issues.createComment({
owner,
repo,
issue_number,
body: closeComment,
}),
`Comment on PR #${issue_number}`
)
// Close PR
await withRetry(
() => github.rest.pulls.update({
owner,
repo,
pull_number: issue_number,
state: "closed",
}),
`Close PR #${issue_number}`
)
closedCount++
core.info(`Closed PR #${issue_number} from ${pr.author?.login || 'unknown'}: ${pr.title}`)
// Delay before processing next PR
await sleep(requestDelayMs)
} catch (error) {
skippedCount++
core.error(`Failed to close PR #${issue_number}: ${error.message}`)
}
}
const elapsed = Math.round((Date.now() - startTime) / 1000)
core.info(`\n========== Summary ==========`)
core.info(`Total open PRs found: ${allPrs.length}`)
core.info(`Stale PRs identified: ${stalePrs.length}`)
core.info(`PRs closed: ${closedCount}`)
core.info(`PRs skipped (errors): ${skippedCount}`)
core.info(`Elapsed time: ${elapsed}s`)
core.info(`=============================`)

View File

@@ -7,6 +7,7 @@ on:
- ci
- dev
- beta
- fix/npm-native-binary-install
- snapshot-*
workflow_dispatch:
inputs:

View File

@@ -29,7 +29,7 @@
},
"packages/app": {
"name": "@opencode-ai/app",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/core": "workspace:*",
@@ -84,7 +84,7 @@
},
"packages/console/app": {
"name": "@opencode-ai/console-app",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@cloudflare/vite-plugin": "1.15.2",
"@ibm/plex": "6.4.1",
@@ -119,7 +119,7 @@
},
"packages/console/core": {
"name": "@opencode-ai/console-core",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@aws-sdk/client-sts": "3.782.0",
"@jsx-email/render": "1.1.1",
@@ -146,7 +146,7 @@
},
"packages/console/function": {
"name": "@opencode-ai/console-function",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@ai-sdk/anthropic": "3.0.64",
"@ai-sdk/openai": "3.0.48",
@@ -168,7 +168,7 @@
},
"packages/console/mail": {
"name": "@opencode-ai/console-mail",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",
@@ -192,7 +192,7 @@
},
"packages/core": {
"name": "@opencode-ai/core",
"version": "1.14.48",
"version": "1.15.0",
"bin": {
"opencode": "./bin/opencode",
},
@@ -253,7 +253,7 @@
},
"packages/desktop": {
"name": "@opencode-ai/desktop",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"drizzle-orm": "catalog:",
"effect": "catalog:",
@@ -307,7 +307,7 @@
},
"packages/enterprise": {
"name": "@opencode-ai/enterprise",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@opencode-ai/core": "workspace:*",
"@opencode-ai/ui": "workspace:*",
@@ -337,7 +337,7 @@
},
"packages/function": {
"name": "@opencode-ai/function",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "catalog:",
@@ -353,7 +353,7 @@
},
"packages/http-recorder": {
"name": "@opencode-ai/http-recorder",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@effect/platform-node": "catalog:",
"effect": "catalog:",
@@ -366,7 +366,7 @@
},
"packages/llm": {
"name": "@opencode-ai/llm",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@smithy/eventstream-codec": "4.2.14",
"@smithy/util-utf8": "4.2.2",
@@ -384,7 +384,7 @@
},
"packages/opencode": {
"name": "opencode",
"version": "1.14.48",
"version": "1.15.0",
"bin": {
"opencode": "./bin/opencode",
},
@@ -421,6 +421,7 @@
"@octokit/graphql": "9.0.2",
"@octokit/rest": "catalog:",
"@openauthjs/openauth": "catalog:",
"@opencode-ai/llm": "workspace:*",
"@opencode-ai/plugin": "workspace:*",
"@opencode-ai/script": "workspace:*",
"@opencode-ai/sdk": "workspace:*",
@@ -489,6 +490,7 @@
"@babel/core": "7.28.4",
"@octokit/webhooks-types": "7.6.1",
"@opencode-ai/core": "workspace:*",
"@opencode-ai/http-recorder": "workspace:*",
"@opencode-ai/script": "workspace:*",
"@parcel/watcher-darwin-arm64": "2.5.1",
"@parcel/watcher-darwin-x64": "2.5.1",
@@ -520,7 +522,7 @@
},
"packages/plugin": {
"name": "@opencode-ai/plugin",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"effect": "catalog:",
@@ -536,9 +538,9 @@
"typescript": "catalog:",
},
"peerDependencies": {
"@opentui/core": ">=0.2.8",
"@opentui/keymap": ">=0.2.8",
"@opentui/solid": ">=0.2.8",
"@opentui/core": ">=0.2.10",
"@opentui/keymap": ">=0.2.10",
"@opentui/solid": ">=0.2.10",
},
"optionalPeers": [
"@opentui/core",
@@ -558,7 +560,7 @@
},
"packages/sdk/js": {
"name": "@opencode-ai/sdk",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"cross-spawn": "catalog:",
},
@@ -573,7 +575,7 @@
},
"packages/slack": {
"name": "@opencode-ai/slack",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"@slack/bolt": "^3.17.1",
@@ -608,7 +610,7 @@
},
"packages/ui": {
"name": "@opencode-ai/ui",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/core": "workspace:*",
@@ -657,7 +659,7 @@
},
"packages/web": {
"name": "@opencode-ai/web",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@astrojs/cloudflare": "12.6.3",
"@astrojs/markdown-remark": "6.3.1",
@@ -721,9 +723,9 @@
"@npmcli/arborist": "9.4.0",
"@octokit/rest": "22.0.0",
"@openauthjs/openauth": "0.0.0-20250322224806",
"@opentui/core": "0.2.8",
"@opentui/keymap": "0.2.8",
"@opentui/solid": "0.2.8",
"@opentui/core": "0.2.10",
"@opentui/keymap": "0.2.10",
"@opentui/solid": "0.2.10",
"@pierre/diffs": "1.1.0-beta.18",
"@playwright/test": "1.59.1",
"@sentry/solid": "10.36.0",
@@ -764,7 +766,7 @@
"tailwindcss": "4.1.11",
"typescript": "5.8.2",
"ulid": "3.0.1",
"virtua": "0.42.3",
"virtua": "0.49.1",
"vite": "7.1.4",
"vite-plugin-solid": "2.11.10",
"zod": "4.1.8",
@@ -1590,23 +1592,23 @@
"@opentelemetry/semantic-conventions": ["@opentelemetry/semantic-conventions@1.40.0", "", {}, "sha512-cifvXDhcqMwwTlTK04GBNeIe7yyo28Mfby85QXFe1Yk8nmi36Ab/5UQwptOx84SsoGNRg+EVSjwzfSZMy6pmlw=="],
"@opentui/core": ["@opentui/core@0.2.8", "", { "dependencies": { "bun-ffi-structs": "0.2.2", "diff": "9.0.0", "marked": "17.0.1", "string-width": "7.2.0", "strip-ansi": "7.1.2", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@opentui/core-darwin-arm64": "0.2.8", "@opentui/core-darwin-x64": "0.2.8", "@opentui/core-linux-arm64": "0.2.8", "@opentui/core-linux-x64": "0.2.8", "@opentui/core-win32-arm64": "0.2.8", "@opentui/core-win32-x64": "0.2.8" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-bRRiCXuwjS8/6mN1oA5iVaf55z9APyalm7FnoxkLkEyIU1VDaQeTpYtElBbfo1rxtcO6Rj53XywH9oW8auNO9A=="],
"@opentui/core": ["@opentui/core@0.2.10", "", { "dependencies": { "bun-ffi-structs": "0.2.2", "diff": "9.0.0", "marked": "17.0.1", "string-width": "7.2.0", "strip-ansi": "7.1.2", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@opentui/core-darwin-arm64": "0.2.10", "@opentui/core-darwin-x64": "0.2.10", "@opentui/core-linux-arm64": "0.2.10", "@opentui/core-linux-x64": "0.2.10", "@opentui/core-win32-arm64": "0.2.10", "@opentui/core-win32-x64": "0.2.10" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-oviCtx0jYjc7F8X2b8+0IkQLg6WH47Nwl6CFeZo5dU0k6OpSbTbi07ZleObaiECAp+S1YLhAtVdgzHU7hBZlaw=="],
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.2.8", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Qh6VCMQgW3hWh/7MR51y+XuQezh8NOLwKS8EQSoKzAr4VOc/W5P0/DvgMKgwaqXw2Mz0AIba/BvZ6by20yc4zA=="],
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.2.10", "", { "os": "darwin", "cpu": "arm64" }, "sha512-+lbDDj42Og+UtTZEwlHhGXichmOlkxSqn0J+Jqjat5/Tt5oZykj1NZjFIQ7ZSz4Miz7EmZwgYKE2CyOmmm9MoQ=="],
"@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.2.8", "", { "os": "darwin", "cpu": "x64" }, "sha512-wQjJ38C3IiVx/gwwBYxnCarzgD75FdS7IyUErt3lhn57XriNiCbb7ScphWnRMwwtL8CI+bBGzClroDRA2lCfvg=="],
"@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.2.10", "", { "os": "darwin", "cpu": "x64" }, "sha512-5iAoA0aqMWWAQ93nh8Bb0ipwt9h+tvEFc88+YO9St43uUJ+XrXcmMj3T8wtl6dSu/SN0UoDWNaUMHUmtykiPtg=="],
"@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.2.8", "", { "os": "linux", "cpu": "arm64" }, "sha512-fx4ADeWSSSVU1O/MkMnklCRxtWRy6CLeAvktLlNdPb+BhmQIDg1kpZcdv7m/3cgD1/ksFEXIwO6VTvfKYE0umw=="],
"@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.2.10", "", { "os": "linux", "cpu": "arm64" }, "sha512-EnrkxgH5K76Oi/Br1UHPZblXG5P60snmtySfnxuVaeECNZrbTkV6BV/A0WoBeWshJweGbx1D+eTF+sEEjQCi8w=="],
"@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.2.8", "", { "os": "linux", "cpu": "x64" }, "sha512-4ekUyzopBj2ClsUbneLnUOrmZtvU67FCVFLgmBfKL4IvVl/P0YobGNg71gN1JNiYpY7hK77qOpidVLHcNMIE7w=="],
"@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.2.10", "", { "os": "linux", "cpu": "x64" }, "sha512-fI+r3kCPqIxsWwPVGpKUQy4zHK8y+jkDRCwa3UbaUy48RQ44jMuf2RhVhmi4xmCvSc8UPJBbYsw1tLuh9kmXjg=="],
"@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.2.8", "", { "os": "win32", "cpu": "arm64" }, "sha512-63K046wpzTzQOLOG9LTsp3+Ld0TNTxeQczexkg0pKSBxZFhws+/9YIGjTctZmJUfE1g1X4tI31dO+KNRpXRHQw=="],
"@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.2.10", "", { "os": "win32", "cpu": "arm64" }, "sha512-8F4z2hIRgkVWcr6CMVeJ9N4+1rmURPt2Pq2GBPko8ch6rxHR+a//KD1MfphyuLTHBS1tJ4vfZSWSoiaESImtrA=="],
"@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.2.8", "", { "os": "win32", "cpu": "x64" }, "sha512-+WDiTlTyDpgkis8rPAhW1fS7TwXJih+fk+RYXS2bC3tAKsRD+O3PRSkVABRbjkuXbtfJZf2cjOHZFGN4Vf5qDg=="],
"@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.2.10", "", { "os": "win32", "cpu": "x64" }, "sha512-Ki+qNBlIFW5K2wcG/RHrlPp7yEQKXeiNX3mlje25iwX62Ac5w391HBpOmUjbPoq20McPyDRnhbLfbXQSPtickg=="],
"@opentui/keymap": ["@opentui/keymap@0.2.8", "", { "dependencies": { "@opentui/core": "0.2.8" }, "peerDependencies": { "@opentui/react": "0.2.8", "@opentui/solid": "0.2.8", "react": ">=19.2.0", "solid-js": "1.9.12" }, "optionalPeers": ["@opentui/react", "@opentui/solid", "react", "solid-js"] }, "sha512-/H9j8fP64cf3/nFDCvVP8+7cwU/oRh4sgfQH2NhcPp8illgBb/e9pG5x3vM0nK4RVyTqUvkPXsOeIX5u7vltlg=="],
"@opentui/keymap": ["@opentui/keymap@0.2.10", "", { "dependencies": { "@opentui/core": "0.2.10" }, "peerDependencies": { "@opentui/react": "0.2.10", "@opentui/solid": "0.2.10", "react": ">=19.2.0", "solid-js": "1.9.12" }, "optionalPeers": ["@opentui/react", "@opentui/solid", "react", "solid-js"] }, "sha512-80fU3Lr/98sNIpVYd8PApAeQw8A8D9BemyOGi6jGvTQCl0rxKgvaVBviDRGKxl1INTVjZy9By8UPncc2KJOuWQ=="],
"@opentui/solid": ["@opentui/solid@0.2.8", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.2.8", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.12", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.12" } }, "sha512-f2g0riBuzk4/ZmcJnp1k13odUmNZcfA3nF7RzdSlEfpkwNDfc4xqnRAwYbNNDwGNrJX0JDCTEZY5ZEhuL155MQ=="],
"@opentui/solid": ["@opentui/solid@0.2.10", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.2.10", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.12", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.12" } }, "sha512-+4/MB90yIQiPwg8Y4wY092yva9BvRTsJeeeEO3e2H7P8k8zxYk4G9bzuhqYLxA9mTVQ+zVDlrmFoPQhT7vpIRw=="],
"@oslojs/asn1": ["@oslojs/asn1@1.0.0", "", { "dependencies": { "@oslojs/binary": "1.0.0" } }, "sha512-zw/wn0sj0j0QKbIXfIlnEcTviaCzYOY3V5rAyjR6YtOByFtJiT574+8p9Wlach0lZH9fddD4yb9laEAIl4vXQA=="],
@@ -4904,7 +4906,7 @@
"vfile-message": ["vfile-message@4.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw=="],
"virtua": ["virtua@0.42.3", "", { "peerDependencies": { "react": ">=16.14.0", "react-dom": ">=16.14.0", "solid-js": ">=1.0", "svelte": ">=5.0", "vue": ">=3.2" }, "optionalPeers": ["react", "react-dom", "solid-js", "svelte", "vue"] }, "sha512-5FoAKcEvh05qsUF97Yz42SWJ7bwnPExjUYHGuoxz1EUtfWtaOgXaRwnylJbDpA0QcH1rKvJ2qsGRi9MK1fpQbg=="],
"virtua": ["virtua@0.49.1", "", { "peerDependencies": { "react": ">=16.14.0", "react-dom": ">=16.14.0", "solid-js": ">=1.0", "svelte": ">=5.0", "vue": ">=3.2" }, "optionalPeers": ["react", "react-dom", "solid-js", "svelte", "vue"] }, "sha512-6f79msqg3jzNFdqJiS0FSzhRN1EHlDhR7EvW7emp6z5qQ22VdsReiDHflkpMEMhoAyUuYr69nwT0aagiM7NrUg=="],
"vite": ["vite@7.1.4", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.14" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-X5QFK4SGynAeeIt+A7ZWnApdUyHYm+pzv/8/A57LqSGcI88U6R6ipOs3uCesdc6yl7nl+zNO0t8LmqAdXcQihw=="],

View File

@@ -70,11 +70,10 @@ const modelHttpErrorsQuery = (product: "go" | "zen") => {
}).json
}
const providerHttpErrorsQuery = (product: "go" | "zen") => {
const providerHttpErrorsQuery = () => {
const filters = [
{ column: "provider", op: "exists" },
{ column: "user_agent", op: "contains", value: "opencode" },
{ column: "isGoTier", op: "=", value: product === "go" ? "true" : "false" },
]
const successHttpStatus = calculatedField({
name: "is_success_http_status",
@@ -101,11 +100,15 @@ const providerHttpErrorsQuery = (product: "go" | "zen") => {
name: "FAILED",
column: failedProviderHttpStatus.name,
filterCombination: "AND",
filters: [...filters, { column: "event_type", op: "=", value: "llm.error" }],
filters: [
...filters,
{ column: "event_type", op: "=", value: "llm.error" },
{ column: "llm.error.code", op: "!=", value: "404" },
],
},
],
formulas: [
{ name: "ERROR", expression: "IF(GTE(SUM($SUCCESS, $FAILED), 50), DIV($FAILED, SUM($SUCCESS, $FAILED)), 0)" },
{ name: "ERROR", expression: "IF(GTE(SUM($SUCCESS, $FAILED), 200), DIV($FAILED, SUM($SUCCESS, $FAILED)), 0)" },
],
timeRange: 900,
}).json
@@ -215,29 +218,10 @@ new honeycomb.Trigger("LowModelTpsZen", {
],
})
new honeycomb.Trigger("IncreasedProviderHttpErrorsGo", {
name: "Increased Provider HTTP Errors [Go]",
new honeycomb.Trigger("IncreasedProviderHttpErrors", {
name: "Increased Provider HTTP Errors",
description,
queryJson: providerHttpErrorsQuery("go"),
alertType: "on_change",
frequency: 300,
thresholds: [{ op: ">=", value: 0.7, exceededLimit: 1 }],
recipients: [
{
id: webhookRecipient.id,
notificationDetails: [
{
variables: [{ name: "type", value: "provider_http_errors" }],
},
],
},
],
})
new honeycomb.Trigger("IncreasedProviderHttpErrorsZen", {
name: "Increased Provider HTTP Errors [Zen]",
description,
queryJson: providerHttpErrorsQuery("zen"),
queryJson: providerHttpErrorsQuery(),
alertType: "on_change",
frequency: 300,
thresholds: [{ op: ">=", value: 0.7, exceededLimit: 1 }],

View File

@@ -1,8 +1,8 @@
{
"nodeModules": {
"x86_64-linux": "sha256-cRhvzZoW6gBbE0sQm1+e+6/WgajuA6MSIL5iroFsfqs=",
"aarch64-linux": "sha256-0knZfxBULqkt5u6sXFx+a/vqw2rc6IC1+LeAd4TNFhM=",
"aarch64-darwin": "sha256-jL4tO+EHSmUF+gQGEaLzAbTxxjkL8OyhTk13vsbomgM=",
"x86_64-darwin": "sha256-bsa7IpS3GaxagcigTa0yqZTkf4e/nbcTQ9aZeb+5eHQ="
"x86_64-linux": "sha256-07DT18KACITDbFSEg7zSMeYmV5eo9HWh1EDymtn4oL0=",
"aarch64-linux": "sha256-PeTiu0LaidmELn5qvX05JveU0fV+ySZq5MhNvCIjc3s=",
"aarch64-darwin": "sha256-CmqfaUVl568EqrUph08DH4GxG0avK67qf/LMlKx/yUs=",
"x86_64-darwin": "sha256-RQlFa75DmRONvClPesTYR4TzOmyj3U1xKmO7Mz7CBvE="
}
}

View File

@@ -35,9 +35,9 @@
"@types/cross-spawn": "6.0.6",
"@octokit/rest": "22.0.0",
"@hono/zod-validator": "0.4.2",
"@opentui/core": "0.2.8",
"@opentui/keymap": "0.2.8",
"@opentui/solid": "0.2.8",
"@opentui/core": "0.2.10",
"@opentui/keymap": "0.2.10",
"@opentui/solid": "0.2.10",
"ulid": "3.0.1",
"@kobalte/core": "0.13.11",
"@types/luxon": "3.7.1",
@@ -74,7 +74,7 @@
"shiki": "3.20.0",
"solid-list": "0.3.0",
"tailwindcss": "4.1.11",
"virtua": "0.42.3",
"virtua": "0.49.1",
"vite": "7.1.4",
"@solidjs/meta": "0.29.4",
"@solidjs/router": "0.15.4",

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/app",
"version": "1.14.48",
"version": "1.15.0",
"description": "",
"type": "module",
"exports": {

View File

@@ -13,6 +13,7 @@ const statusLabels = {
connected: "mcp.status.connected",
failed: "mcp.status.failed",
needs_auth: "mcp.status.needs_auth",
needs_client_registration: "mcp.status.needs_client_registration",
disabled: "mcp.status.disabled",
} as const
@@ -31,8 +32,16 @@ export const DialogSelectMcp: Component = () => {
const toggle = useMutation(() => ({
mutationFn: async (name: string) => {
if (sync.data.mcp[name]?.status === "connected") await sdk.client.mcp.disconnect({ name })
else await sdk.client.mcp.connect({ name })
const status = sync.data.mcp[name]
if (status?.status === "connected") {
await sdk.client.mcp.disconnect({ name })
return
}
if (status?.status === "needs_auth") {
await sdk.client.mcp.auth.authenticate({ name })
return
}
await sdk.client.mcp.connect({ name })
},
onSuccess: () => queryClient.refetchQueries(queryOptions.mcp(pathKey(sync.directory))),
}))
@@ -67,7 +76,7 @@ export const DialogSelectMcp: Component = () => {
}
const error = () => {
const s = mcpStatus()
return s?.status === "failed" ? s.error : undefined
if (s?.status === "failed" || s?.status === "needs_client_registration") return s.error
}
const enabled = () => status() === "connected"
return (
@@ -78,9 +87,6 @@ export const DialogSelectMcp: Component = () => {
<Show when={statusLabel()}>
<span class="text-11-regular text-text-weaker">{statusLabel()}</span>
</Show>
<Show when={toggle.isPending && toggle.variables === i.name}>
<span class="text-11-regular text-text-weak">{language.t("common.loading.ellipsis")}</span>
</Show>
</div>
<Show when={error()}>
<span class="text-11-regular text-text-weaker truncate">{error()}</span>

View File

@@ -145,7 +145,15 @@ const useMcpToggleMutation = () => {
return useMutation(() => ({
mutationFn: async (name: string) => {
const status = sync.data.mcp[name]
await (status?.status === "connected" ? sdk.client.mcp.disconnect({ name }) : sdk.client.mcp.connect({ name }))
if (status?.status === "connected") {
await sdk.client.mcp.disconnect({ name })
return
}
if (status?.status === "needs_auth") {
await sdk.client.mcp.auth.authenticate({ name })
return
}
await sdk.client.mcp.connect({ name })
},
onSuccess: () => queryClient.refetchQueries(queryOptions.mcp(pathKey(sync.directory))),
onError: (err) => {
@@ -316,7 +324,7 @@ export function StatusPopoverBody(props: { shown: Accessor<boolean> }) {
return (
<button
type="button"
class="flex items-center gap-2 w-full h-8 pl-3 pr-2 py-1 rounded-md hover:bg-surface-raised-base-hover transition-colors text-left"
class="flex items-center gap-2 w-full min-h-8 pl-3 pr-2 py-1 rounded-md hover:bg-surface-raised-base-hover transition-colors text-left"
onClick={() => {
if (toggleMcp.isPending) return
toggleMcp.mutate(name)
@@ -333,7 +341,16 @@ export function StatusPopoverBody(props: { shown: Accessor<boolean> }) {
status() === "needs_auth" || status() === "needs_client_registration",
}}
/>
<span class="text-14-regular text-text-base truncate flex-1">{name}</span>
<span class="flex flex-col min-w-0 flex-1">
<span class="flex items-center gap-2 min-w-0">
<span class="text-14-regular text-text-base truncate">{name}</span>
</span>
<Show when={status() === "needs_auth"}>
<span class="text-11-regular text-text-weaker truncate">
{language.t("mcp.auth.clickToAuthenticate")}
</span>
</Show>
</span>
<div onClick={(event) => event.stopPropagation()}>
<Switch
checked={enabled()}

View File

@@ -14,12 +14,14 @@ export function StatusPopover() {
const sync = useSync()
const [shown, setShown] = createSignal(false)
const ready = createMemo(() => server.healthy() === false || sync.data.mcp_ready)
const healthy = createMemo(() => {
const serverHealthy = server.healthy() === true
const mcpIssue = createMemo(() => {
const mcp = Object.values(sync.data.mcp ?? {})
const issue = mcp.some((item) => item.status !== "connected" && item.status !== "disabled")
return serverHealthy && !issue
const failed = mcp.some((item) => item.status === "failed" || item.status === "needs_client_registration")
const warn = mcp.some((item) => item.status === "needs_auth")
if (failed) return "critical" as const
if (warn) return "warning" as const
})
const healthy = createMemo(() => server.healthy() === true && !mcpIssue())
return (
<Popover
@@ -41,7 +43,9 @@ export function StatusPopover() {
classList={{
"absolute -top-px -right-px size-1.5 rounded-full": true,
"bg-icon-success-base": ready() && healthy(),
"bg-icon-critical-base": server.healthy() === false || (ready() && !healthy()),
"bg-icon-warning-base": ready() && server.healthy() === true && mcpIssue() === "warning",
"bg-icon-critical-base":
server.healthy() === false || (ready() && server.healthy() === true && mcpIssue() === "critical"),
"bg-border-weak-base": server.healthy() === undefined || !ready(),
}}
/>

View File

@@ -125,6 +125,7 @@ export function applyDirectoryEvent(input: {
const info = (event.properties as { info: Session }).info
const result = Binary.search(input.store.session, info.id, (s) => s.id)
if (info.time.archived) {
if (input.store.session[result.index]!.time.archived === info.time.archived) break
if (result.found) {
input.setStore(
"session",

View File

@@ -276,6 +276,7 @@ export const dict = {
"mcp.status.connected": "متصل",
"mcp.status.failed": "فشل",
"mcp.status.needs_auth": "يحتاج إلى مصادقة",
"mcp.auth.clickToAuthenticate": "انقر للمصادقة",
"mcp.status.disabled": "معطل",
"dialog.fork.empty": "لا توجد رسائل للتفرع منها",
"dialog.directory.search.placeholder": "البحث في المجلدات",

View File

@@ -276,6 +276,7 @@ export const dict = {
"mcp.status.connected": "conectado",
"mcp.status.failed": "falhou",
"mcp.status.needs_auth": "precisa de autenticação",
"mcp.auth.clickToAuthenticate": "Clique para autenticar",
"mcp.status.disabled": "desabilitado",
"dialog.fork.empty": "Nenhuma mensagem para bifurcar",
"dialog.directory.search.placeholder": "Buscar pastas",

View File

@@ -300,6 +300,7 @@ export const dict = {
"mcp.status.connected": "povezano",
"mcp.status.failed": "neuspjelo",
"mcp.status.needs_auth": "potrebna autentifikacija",
"mcp.auth.clickToAuthenticate": "Kliknite za autentifikaciju",
"mcp.status.disabled": "onemogućeno",
"dialog.fork.empty": "Nema poruka za fork",

View File

@@ -298,6 +298,7 @@ export const dict = {
"mcp.status.connected": "forbundet",
"mcp.status.failed": "mislykkedes",
"mcp.status.needs_auth": "kræver godkendelse",
"mcp.auth.clickToAuthenticate": "Klik for at godkende",
"mcp.status.disabled": "deaktiveret",
"dialog.fork.empty": "Ingen beskeder at forgrene fra",

View File

@@ -282,6 +282,7 @@ export const dict = {
"mcp.status.connected": "verbunden",
"mcp.status.failed": "fehlgeschlagen",
"mcp.status.needs_auth": "benötigt Authentifizierung",
"mcp.auth.clickToAuthenticate": "Zum Authentifizieren klicken",
"mcp.status.disabled": "deaktiviert",
"dialog.fork.empty": "Keine Nachrichten zum Abzweigen vorhanden",
"dialog.directory.search.placeholder": "Ordner durchsuchen",

View File

@@ -306,6 +306,7 @@ export const dict = {
"mcp.status.failed": "failed",
"mcp.status.needs_auth": "needs auth",
"mcp.status.disabled": "disabled",
"mcp.auth.clickToAuthenticate": "Click to authenticate",
"dialog.fork.empty": "No messages to fork from",
@@ -902,7 +903,7 @@ export const dict = {
"settings.permissions.tool.read.title": "Read",
"settings.permissions.tool.read.description": "Reading a file (matches the file path)",
"settings.permissions.tool.edit.title": "Edit",
"settings.permissions.tool.edit.description": "Modify files, including edits, writes, patches, and multi-edits",
"settings.permissions.tool.edit.description": "Modify files, including edits, writes, and patches",
"settings.permissions.tool.glob.title": "Glob",
"settings.permissions.tool.glob.description": "Match files using glob patterns",
"settings.permissions.tool.grep.title": "Grep",

View File

@@ -299,6 +299,7 @@ export const dict = {
"mcp.status.connected": "conectado",
"mcp.status.failed": "fallido",
"mcp.status.needs_auth": "necesita auth",
"mcp.auth.clickToAuthenticate": "Haz clic para autenticar",
"mcp.status.disabled": "deshabilitado",
"dialog.fork.empty": "No hay mensajes desde donde bifurcar",

View File

@@ -277,6 +277,7 @@ export const dict = {
"mcp.status.connected": "connecté",
"mcp.status.failed": "échoué",
"mcp.status.needs_auth": "nécessite auth",
"mcp.auth.clickToAuthenticate": "Cliquez pour vous authentifier",
"mcp.status.disabled": "désactivé",
"dialog.fork.empty": "Aucun message à partir duquel bifurquer",
"dialog.directory.search.placeholder": "Rechercher des dossiers",

View File

@@ -275,6 +275,7 @@ export const dict = {
"mcp.status.connected": "接続済み",
"mcp.status.failed": "失敗",
"mcp.status.needs_auth": "認証が必要",
"mcp.auth.clickToAuthenticate": "クリックして認証",
"mcp.status.disabled": "無効",
"dialog.fork.empty": "フォーク元のメッセージがありません",
"dialog.directory.search.placeholder": "フォルダを検索",

View File

@@ -275,6 +275,7 @@ export const dict = {
"mcp.status.connected": "연결됨",
"mcp.status.failed": "실패",
"mcp.status.needs_auth": "인증 필요",
"mcp.auth.clickToAuthenticate": "클릭하여 인증",
"mcp.status.disabled": "비활성화됨",
"dialog.fork.empty": "분기할 메시지 없음",
"dialog.directory.search.placeholder": "폴더 검색",

View File

@@ -302,6 +302,7 @@ export const dict = {
"mcp.status.connected": "tilkoblet",
"mcp.status.failed": "mislyktes",
"mcp.status.needs_auth": "trenger autentisering",
"mcp.auth.clickToAuthenticate": "Klikk for å autentisere",
"mcp.status.disabled": "deaktivert",
"dialog.fork.empty": "Ingen meldinger å forgrene fra",

View File

@@ -277,6 +277,7 @@ export const dict = {
"mcp.status.connected": "połączono",
"mcp.status.failed": "niepowodzenie",
"mcp.status.needs_auth": "wymaga autoryzacji",
"mcp.auth.clickToAuthenticate": "Kliknij, aby się uwierzytelnić",
"mcp.status.disabled": "wyłączone",
"dialog.fork.empty": "Brak wiadomości do rozwidlenia",
"dialog.directory.search.placeholder": "Szukaj folderów",

View File

@@ -299,6 +299,7 @@ export const dict = {
"mcp.status.connected": "подключено",
"mcp.status.failed": "ошибка",
"mcp.status.needs_auth": "требуется авторизация",
"mcp.auth.clickToAuthenticate": "Нажмите, чтобы авторизоваться",
"mcp.status.disabled": "отключено",
"dialog.fork.empty": "Нет сообщений для ответвления",

View File

@@ -299,6 +299,7 @@ export const dict = {
"mcp.status.connected": "เชื่อมต่อแล้ว",
"mcp.status.failed": "ล้มเหลว",
"mcp.status.needs_auth": "ต้องการการตรวจสอบสิทธิ์",
"mcp.auth.clickToAuthenticate": "คลิกเพื่อยืนยันตัวตน",
"mcp.status.disabled": "ปิดใช้งาน",
"dialog.fork.empty": "ไม่มีข้อความให้แตกแขนง",

View File

@@ -304,6 +304,7 @@ export const dict = {
"mcp.status.connected": "bağlı",
"mcp.status.failed": "başarısız",
"mcp.status.needs_auth": "kimlik doğrulama gerekli",
"mcp.auth.clickToAuthenticate": "Kimlik doğrulamak için tıklayın",
"mcp.status.disabled": "devre dışı",
"dialog.fork.empty": "Dallandırılacak mesaj yok",

View File

@@ -319,6 +319,7 @@ export const dict = {
"mcp.status.connected": "已连接",
"mcp.status.failed": "失败",
"mcp.status.needs_auth": "需要授权",
"mcp.auth.clickToAuthenticate": "点击进行授权",
"mcp.status.disabled": "已禁用",
"dialog.fork.empty": "没有可用于分叉的消息",

View File

@@ -299,6 +299,7 @@ export const dict = {
"mcp.status.connected": "已連線",
"mcp.status.failed": "失敗",
"mcp.status.needs_auth": "需要授權",
"mcp.auth.clickToAuthenticate": "點擊以進行授權",
"mcp.status.disabled": "已停用",
"dialog.fork.empty": "沒有可用於分支的訊息",

View File

@@ -29,7 +29,7 @@ import { previewSelectedLines } from "@opencode-ai/ui/pierre/selection-bridge"
import { Button } from "@opencode-ai/ui/button"
import { showToast } from "@opencode-ai/ui/toast"
import { checksum } from "@opencode-ai/core/util/encode"
import { useSearchParams } from "@solidjs/router"
import { useLocation, useSearchParams } from "@solidjs/router"
import { NewSessionView, SessionHeader } from "@/components/session"
import { useComments } from "@/context/comments"
import { getSessionPrefetch, SESSION_PREFETCH_TTL } from "@/context/global-sync/session-prefetch"
@@ -75,7 +75,6 @@ type VcsMode = "git" | "branch"
type SessionHistoryWindowInput = {
sessionID: () => string | undefined
messagesReady: () => boolean
loaded: () => number
visibleUserMessages: () => UserMessage[]
historyMore: () => boolean
@@ -85,205 +84,74 @@ type SessionHistoryWindowInput = {
scroller: () => HTMLDivElement | undefined
}
/**
* Maintains the rendered history window for a session timeline.
*
* It keeps initial paint bounded to recent turns, reveals cached turns in
* small batches while scrolling upward, and prefetches older history near top.
*/
function createSessionHistoryWindow(input: SessionHistoryWindowInput) {
const turnInit = 10
const turnBatch = 8
const turnScrollThreshold = 200
const turnPrefetchBuffer = 16
const prefetchCooldownMs = 400
const prefetchNoGrowthLimit = 2
function createSessionHistoryLoader(input: SessionHistoryWindowInput) {
const historyScrollThreshold = 200
let shiftFrame: number | undefined
const [state, setState] = createStore({
turnID: undefined as string | undefined,
turnStart: 0,
prefetchUntil: 0,
prefetchNoGrowth: 0,
shift: false,
})
const initialTurnStart = (len: number) => (len > turnInit ? len - turnInit : 0)
const turnStart = createMemo(() => {
const id = input.sessionID()
const len = input.visibleUserMessages().length
if (!id || len <= 0) return 0
if (state.turnID !== id) return initialTurnStart(len)
if (state.turnStart <= 0) return 0
if (state.turnStart >= len) return initialTurnStart(len)
return state.turnStart
const userMessages = createMemo(() => input.visibleUserMessages(), emptyUserMessages, {
equals: same,
})
const setTurnStart = (start: number) => {
const id = input.sessionID()
const next = start > 0 ? start : 0
if (!id) {
setState({ turnID: undefined, turnStart: next })
return
}
setState({ turnID: id, turnStart: next })
const cancelShiftReset = () => {
if (shiftFrame === undefined) return
cancelAnimationFrame(shiftFrame)
shiftFrame = undefined
}
const renderedUserMessages = createMemo(
() => {
const msgs = input.visibleUserMessages()
const start = turnStart()
if (start <= 0) return msgs
return msgs.slice(start)
},
emptyUserMessages,
{
equals: same,
},
)
const preserveScroll = (fn: () => void) => {
const el = input.scroller()
if (!el) {
fn()
return
}
const beforeTop = el.scrollTop
const beforeHeight = el.scrollHeight
fn()
requestAnimationFrame(() => {
const delta = el.scrollHeight - beforeHeight
if (!delta) return
el.scrollTop = beforeTop + delta
const scheduleShiftReset = () => {
cancelShiftReset()
shiftFrame = requestAnimationFrame(() => {
shiftFrame = undefined
setState("shift", false)
})
}
const backfillTurns = () => {
const start = turnStart()
if (start <= 0) return
const next = start - turnBatch
const nextStart = next > 0 ? next : 0
preserveScroll(() => setTurnStart(nextStart))
}
/** Button path: reveal all cached turns, fetch older history, reveal one batch. */
const loadAndReveal = async () => {
const id = input.sessionID()
if (!id) return
const start = turnStart()
const beforeVisible = input.visibleUserMessages().length
let loaded = input.loaded()
if (start > 0) setTurnStart(0)
if (!input.historyMore() || input.historyLoading()) return
let afterVisible = beforeVisible
let added = 0
while (true) {
await input.loadMore(id)
if (input.sessionID() !== id) return
afterVisible = input.visibleUserMessages().length
const nextLoaded = input.loaded()
const raw = nextLoaded - loaded
added += raw
loaded = nextLoaded
if (afterVisible > beforeVisible) break
if (raw <= 0) break
if (!input.historyMore()) break
}
if (added <= 0) return
if (state.prefetchNoGrowth) setState("prefetchNoGrowth", 0)
const growth = afterVisible - beforeVisible
if (growth <= 0) return
if (turnStart() !== 0) return
const target = Math.min(afterVisible, beforeVisible + turnBatch)
setTurnStart(Math.max(0, afterVisible - target))
}
/** Scroll/prefetch path: fetch older history from server. */
const fetchOlderMessages = async (opts?: { prefetch?: boolean }) => {
const fetchOlderMessages = async () => {
const id = input.sessionID()
if (!id) return
if (!input.historyMore() || input.historyLoading()) return
if (opts?.prefetch) {
const now = Date.now()
if (state.prefetchUntil > now) return
if (state.prefetchNoGrowth >= prefetchNoGrowthLimit) return
setState("prefetchUntil", now + prefetchCooldownMs)
}
const start = turnStart()
// TODO(session-timeline): switch this to core cursor-based part pagination when that API lands.
const beforeVisible = input.visibleUserMessages().length
const beforeRendered = start <= 0 ? beforeVisible : renderedUserMessages().length
let loaded = input.loaded()
let added = 0
let growth = 0
cancelShiftReset()
setState("shift", true)
while (true) {
await input.loadMore(id)
if (input.sessionID() !== id) return
const nextLoaded = input.loaded()
const raw = nextLoaded - loaded
added += raw
loaded = nextLoaded
growth = input.visibleUserMessages().length - beforeVisible
if (growth > 0) break
if (raw <= 0) break
if (opts?.prefetch) break
if (!input.historyMore()) break
}
const afterVisible = input.visibleUserMessages().length
if (opts?.prefetch) {
setState("prefetchNoGrowth", added > 0 ? 0 : state.prefetchNoGrowth + 1)
} else if (added > 0 && state.prefetchNoGrowth) {
setState("prefetchNoGrowth", 0)
}
if (added <= 0) return
if (growth <= 0) return
if (opts?.prefetch) {
const current = turnStart()
preserveScroll(() => setTurnStart(current + growth))
if (growth > 0) {
scheduleShiftReset()
return
}
if (turnStart() !== start) return
const currentRendered = renderedUserMessages().length
const base = Math.max(beforeRendered, currentRendered)
const target = Math.min(afterVisible, base + turnBatch)
preserveScroll(() => setTurnStart(Math.max(0, afterVisible - target)))
setState("shift", false)
}
const loadAndReveal = () => fetchOlderMessages()
const onScrollerScroll = () => {
if (!input.userScrolled()) return
const el = input.scroller()
if (!el) return
if (el.scrollTop >= turnScrollThreshold) return
const start = turnStart()
if (start > 0) {
if (start <= turnPrefetchBuffer) {
void fetchOlderMessages({ prefetch: true })
}
backfillTurns()
return
}
if (el.scrollTop >= historyScrollThreshold) return
void fetchOlderMessages()
}
@@ -292,27 +160,18 @@ function createSessionHistoryWindow(input: SessionHistoryWindowInput) {
on(
input.sessionID,
() => {
setState({ prefetchUntil: 0, prefetchNoGrowth: 0 })
cancelShiftReset()
setState({ shift: false })
},
{ defer: true },
),
)
createEffect(
on(
() => [input.sessionID(), input.messagesReady()] as const,
([id, ready]) => {
if (!id || !ready) return
setTurnStart(initialTurnStart(input.visibleUserMessages().length))
},
{ defer: true },
),
)
onCleanup(cancelShiftReset)
return {
turnStart,
setTurnStart,
renderedUserMessages,
userMessages,
shift: () => state.shift,
loadAndReveal,
onScrollerScroll,
}
@@ -333,6 +192,7 @@ export default function Page() {
const comments = useComments()
const terminal = useTerminal()
const [searchParams, setSearchParams] = useSearchParams<{ prompt?: string }>()
const location = useLocation()
const { params, sessionKey, tabs, view } = useSessionLayout()
createEffect(() => {
@@ -737,6 +597,7 @@ export default function Page() {
let dockHeight = 0
let scroller: HTMLDivElement | undefined
let content: HTMLDivElement | undefined
let revealMessage = (_id: string) => {}
let scrollMark = 0
let messageMark = 0
@@ -1403,9 +1264,8 @@ export default function Page() {
},
)
const historyWindow = createSessionHistoryWindow({
const historyLoader = createSessionHistoryLoader({
sessionID: () => params.id,
messagesReady,
loaded: () => messages().length,
visibleUserMessages,
historyMore,
@@ -1427,9 +1287,9 @@ export default function Page() {
const el = scroller
if (!el) return
if (el.scrollHeight > el.clientHeight + 1) return
if (historyWindow.turnStart() <= 0 && !historyMore()) return
if (!historyMore()) return
void historyWindow.loadAndReveal()
void historyLoader.loadAndReveal()
})
}
@@ -1439,15 +1299,14 @@ export default function Page() {
[
params.id,
messagesReady(),
historyWindow.turnStart(),
historyMore(),
historyLoading(),
autoScroll.userScrolled(),
visibleUserMessages().length,
] as const,
([id, ready, start, more, loading, scrolled]) => {
([id, ready, more, loading, scrolled]) => {
if (!id || !ready || loading || scrolled) return
if (start <= 0 && !more) return
if (!more) return
fill()
},
{ defer: true },
@@ -1749,15 +1608,14 @@ export default function Page() {
historyMore,
historyLoading,
loadMore: (sessionID) => sync.session.history.loadMore(sessionID),
turnStart: historyWindow.turnStart,
currentMessageId: () => store.messageId,
pendingMessage: () => ui.pendingMessage,
setPendingMessage: (value) => setUi("pendingMessage", value),
setActiveMessage,
setTurnStart: historyWindow.setTurnStart,
autoScroll,
scroller: () => scroller,
anchor,
revealMessage: (id) => revealMessage(id),
scheduleScrollState,
consumePendingMessage: layout.pendingMessage.consume,
})
@@ -1830,20 +1688,23 @@ export default function Page() {
>
<div class="flex-1 min-h-0 overflow-hidden">
<Switch>
<Match when={params.id && mobileChanges()}>
<div class="relative h-full overflow-hidden">
{reviewContent({
diffStyle: "unified",
classes: {
root: "pb-8",
header: "px-4",
container: "px-4",
},
loadingClass: "px-4 py-4 text-text-weak",
emptyClass: "h-full pb-64 -mt-4 flex flex-col items-center justify-center text-center gap-6",
})}
</div>
</Match>
<Match when={params.id}>
<Show when={messagesReady()}>
<MessageTimeline
mobileChanges={mobileChanges()}
mobileFallback={reviewContent({
diffStyle: "unified",
classes: {
root: "pb-8",
header: "px-4",
container: "px-4",
},
loadingClass: "px-4 py-4 text-text-weak",
emptyClass: "h-full pb-64 -mt-4 flex flex-col items-center justify-center text-center gap-6",
})}
actions={actions}
scroll={ui.scroll}
onResumeScroll={resumeScroll}
@@ -1853,8 +1714,11 @@ export default function Page() {
onMarkScrollGesture={markScrollGesture}
hasScrollGesture={hasScrollGesture}
onUserScroll={markUserScroll}
onTurnBackfillScroll={historyWindow.onScrollerScroll}
onHistoryScroll={historyLoader.onScrollerScroll}
onAutoScrollInteraction={autoScroll.handleInteraction}
shouldAnchorBottom={() =>
!location.hash && !store.messageId && !ui.pendingMessage && !autoScroll.userScrolled()
}
centered={centered()}
setContentRef={(el) => {
content = el
@@ -1863,14 +1727,12 @@ export default function Page() {
const root = scroller
if (root) scheduleScrollState(root)
}}
turnStart={historyWindow.turnStart()}
historyMore={historyMore()}
historyLoading={historyLoading()}
onLoadEarlier={() => {
void historyWindow.loadAndReveal()
}}
renderedUserMessages={historyWindow.renderedUserMessages()}
historyShift={historyLoader.shift()}
userMessages={historyLoader.userMessages()}
anchor={anchor}
setRevealMessage={(fn) => {
revealMessage = fn
}}
/>
</Show>
</Match>

View File

@@ -0,0 +1,337 @@
import { parseCommentNote, readCommentMetadata } from "@/utils/comment-note"
import { AssistantMessage, Part, SessionStatus, SnapshotFileDiff, UserMessage } from "@opencode-ai/sdk/v2"
import { groupParts, PartGroup, renderable } from "@opencode-ai/ui/message-part"
import { Data, Equal } from "effect"
/** Snapshot file diff that is guaranteed to carry the path of the file it touches. */
export type SummaryDiff = SnapshotFileDiff & { file: string }
/**
 * Tag -> payload map for every row kind the session timeline can render.
 * Mirrors the classes declared in the TimelineRow namespace; keep the two
 * in sync when adding or changing a row kind.
 */
export type TimelineRowMap = {
// Strip of review comments rendered for a user message.
CommentStrip: {
userMessageID: string
previousUserMessage: boolean
}
// The user's prompt; `anchor` marks it as the turn's anchor element.
UserMessage: {
userMessageID: string
anchor: boolean
previousUserMessage: boolean
}
// Divider shown when a turn was compacted or interrupted.
TurnDivider: {
userMessageID: string
label: "compaction" | "interrupted"
}
// One rendered group of assistant parts within the turn.
AssistantPart: {
userMessageID: string
group: PartGroup
previousAssistantPart: boolean
lastAssistantPart: boolean
}
// Transient "thinking" indicator, optionally showing a reasoning heading.
Thinking: { userMessageID: string; reasoningHeading?: string }
// Transient retry indicator for the active turn.
Retry: { userMessageID: string }
// Per-file diff summary shown once a turn has finished.
DiffSummary: { userMessageID: string; diffs: SummaryDiff[] }
// Error text extracted from an aborted assistant message.
Error: { userMessageID: string; text: string }
// Trailing spacer at the bottom of the timeline.
BottomSpacer: {}
}
/**
 * Tagged row classes for the session timeline plus helpers to key and
 * compare them. Data.TaggedClass (from effect) gives each row a `_tag`
 * discriminant and structural equality, which `equals` relies on.
 */
export namespace TimelineRow {
// Strip of review comments rendered for a user message.
export class CommentStrip extends Data.TaggedClass("CommentStrip")<{
userMessageID: string
previousUserMessage: boolean
}> {}
// The user's prompt row; `anchor` marks it as the turn's anchor element.
export class UserMessage extends Data.TaggedClass("UserMessage")<{
userMessageID: string
anchor: boolean
previousUserMessage: boolean
}> {}
// Divider shown when a turn was compacted or interrupted.
export class TurnDivider extends Data.TaggedClass("TurnDivider")<{
userMessageID: string
label: "compaction" | "interrupted"
}> {}
// One rendered group of assistant parts within the turn.
export class AssistantPart extends Data.TaggedClass("AssistantPart")<{
userMessageID: string
group: PartGroup
previousAssistantPart: boolean
lastAssistantPart: boolean
}> {}
// Transient "thinking" indicator, optionally showing a reasoning heading.
export class Thinking extends Data.TaggedClass("Thinking")<{
userMessageID: string
reasoningHeading?: string
}> {}
// Per-file diff summary shown once a turn has finished.
export class DiffSummary extends Data.TaggedClass("DiffSummary")<{
userMessageID: string
diffs: SummaryDiff[]
}> {}
// Error text row. NOTE: shadows the global `Error` inside this namespace.
export class Error extends Data.TaggedClass("Error")<{
userMessageID: string
text: string
}> {}
// Transient retry indicator for the active turn.
export class Retry extends Data.TaggedClass("Retry")<{
userMessageID: string
}> {}
// Trailing spacer at the bottom of the timeline.
export class BottomSpacer extends Data.TaggedClass("BottomSpacer")<{}> {}
// Union of every row kind; discriminate on `_tag`.
export type TimelineRow =
| CommentStrip
| UserMessage
| TurnDivider
| AssistantPart
| Thinking
| DiffSummary
| Error
| Retry
| BottomSpacer
/**
 * Stable string key for a row, used to identify it across renders.
 * Keys combine the tag with the owning user message id (and the group key
 * or divider label where one message can yield several rows of a kind).
 */
export const key = (row: TimelineRow) => {
switch (row._tag) {
case "CommentStrip":
return `comment-strip:${row.userMessageID}`
case "UserMessage":
return `user-message:${row.userMessageID}`
case "TurnDivider":
return `turn-divider:${row.userMessageID}:${row.label}`
case "AssistantPart":
return `assistant-part:${row.userMessageID}:${row.group.key}`
case "Thinking":
return `thinking:${row.userMessageID}`
case "DiffSummary":
return `diff-summary:${row.userMessageID}`
case "Error":
return `error:${row.userMessageID}`
case "Retry":
return `retry:${row.userMessageID}`
case "BottomSpacer":
return "bottom-spacer"
}
}
/** Structural equality between two rows (effect's Equal on tagged classes). */
export function equals(a: TimelineRow, b: TimelineRow) {
return Equal.equals(a, b)
}
}
/** Pure construction of timeline rows from session messages and parts. */
export namespace Timeline {
/**
 * Builds the ordered row list for a single user turn. Row order is render
 * order: optional comment strip, the user message, an optional divider
 * (compaction/interruption), assistant part groups, then transient rows
 * (thinking/retry) for the active turn and terminal rows (diff summary,
 * error) once the turn has settled.
 *
 * @param userMessage the user message opening this turn
 * @param getMessageParts lookup for the parts of any message id
 * @param assistantMessages assistant replies belonging to this turn
 * @param index position of this turn in the timeline (0 = first)
 * @param showReasoning whether reasoning parts are rendered as rows
 * @param status current session status
 * @param isActive whether this is the session's latest/active turn
 */
export function constructMessageRows(
userMessage: UserMessage,
getMessageParts: (messageID: string) => Part[],
assistantMessages: AssistantMessage[],
index: number,
showReasoning: boolean,
status: SessionStatus["type"],
isActive: boolean,
) {
const rows: TimelineRow.TimelineRow[] = []
const previousUserMessage = index > 0
const userParts = getMessageParts(userMessage.id)
// Review comments attached to this prompt (synthetic text parts).
const comments = userParts.flatMap((p) => MessageComment.fromPart(p) ?? [])
const compaction = userParts.some((p) => p.type === "compaction")
// A MessageAbortedError on any assistant message marks the turn interrupted.
const errorMsg = assistantMessages.find((m) => m.error?.name === "MessageAbortedError")
const interrupted = !!errorMsg
// Only renderable parts (respecting the reasoning toggle) become rows.
const assistantPartRefs = assistantMessages.flatMap((message) =>
getMessageParts(message.id)
.filter((part) => renderable(part, showReasoning))
.map((part) => ({ messageID: message.id, part })),
)
const assistantGroups = groupParts(assistantPartRefs)
if (comments.length > 0)
rows.push(
new TimelineRow.CommentStrip({
userMessageID: userMessage.id,
previousUserMessage,
}),
)
// When a comment strip precedes the message, the strip owns the anchor
// and the "previous message" spacing instead of the message itself.
rows.push(
new TimelineRow.UserMessage({
userMessageID: userMessage.id,
anchor: comments.length === 0,
previousUserMessage: comments.length === 0 && previousUserMessage,
}),
)
if (compaction || interrupted) {
rows.push(
new TimelineRow.TurnDivider({
userMessageID: userMessage.id,
label: compaction ? "compaction" : "interrupted",
}),
)
}
assistantGroups.forEach((group, index) =>
rows.push(
new TimelineRow.AssistantPart({
userMessageID: userMessage.id,
group,
previousAssistantPart: index > 0,
lastAssistantPart: index === assistantGroups.length - 1,
}),
),
)
// Thinking indicator: active turn, busy session, not aborted. When
// reasoning parts are rendered, only show it before any part exists.
if (isActive && status === "busy" && !errorMsg?.error && (showReasoning ? assistantPartRefs.length === 0 : true)) {
// Surface the first heading found in any reasoning part's text.
const heading = assistantMessages
.flatMap((message) => getMessageParts(message.id))
.map((part) => (part.type === "reasoning" && part.text ? reasoningHeading(part.text) : undefined))
.find((value): value is string => !!value)
rows.push(
new TimelineRow.Thinking({
userMessageID: userMessage.id,
reasoningHeading: heading,
}),
)
}
if (isActive && status === "retry") rows.push(new TimelineRow.Retry({ userMessageID: userMessage.id }))
// Dedupe diffs by file, keeping the last diff per file while preserving
// the relative order of the kept entries.
const diffs = (userMessage.summary?.diffs ?? [])
.reduceRight<SummaryDiff[]>((result, diff) => {
if (!isSummaryDiff(diff)) return result
if (result.some((item) => item.file === diff.file)) return result
result.push(diff)
return result
}, [])
.reverse()
// Diff summary only once the turn has settled (idle, or not the active turn).
if (diffs.length > 0 && (status === "idle" || !isActive)) {
rows.push(
new TimelineRow.DiffSummary({
userMessageID: userMessage.id,
diffs,
}),
)
}
if (errorMsg?.error) {
const data = errorMsg.error.data?.message
rows.push(
new TimelineRow.Error({
userMessageID: userMessage.id,
text: unwrapErrorMessage(
typeof data === "string" ? data : data === undefined || data === null ? "" : String(data),
),
}),
)
}
return rows
}
/** Narrows a snapshot diff to one that carries a file path. */
function isSummaryDiff(value: SnapshotFileDiff): value is SummaryDiff {
return typeof value.file === "string"
}
/**
 * Extracts the first heading from reasoning markdown, trying in order:
 * HTML <h1>-<h6>, ATX (#) headings, Setext (underlined) headings, then a
 * line that is entirely bold. Returns undefined when none matches.
 */
function reasoningHeading(text: string) {
const markdown = text.replace(/\r\n?/g, "\n")
const html = markdown.match(/<h[1-6][^>]*>([\s\S]*?)<\/h[1-6]>/i)
if (html?.[1]) {
const value = cleanHeading(html[1].replace(/<[^>]+>/g, " "))
if (value) return value
}
const atx = markdown.match(/^\s{0,3}#{1,6}[ \t]+(.+?)(?:[ \t]+#+[ \t]*)?$/m)
if (atx?.[1]) {
const value = cleanHeading(atx[1])
if (value) return value
}
const setext = markdown.match(/^([^\n]+)\n(?:=+|-+)\s*$/m)
if (setext?.[1]) {
const value = cleanHeading(setext[1])
if (value) return value
}
const strong = markdown.match(/^\s*(?:\*\*|__)(.+?)(?:\*\*|__)\s*$/m)
if (strong?.[1]) {
const value = cleanHeading(strong[1])
if (value) return value
}
}
// Strips inline code backticks, link syntax, and emphasis markers.
function cleanHeading(value: string) {
return value
.replace(/`([^`]+)`/g, "$1")
.replace(/\[([^\]]+)\]\([^)]+\)/g, "$1")
.replace(/[*_~]+/g, "")
.trim()
}
/**
 * Best-effort extraction of a concise message from an error string that may
 * wrap JSON (possibly double-encoded, or embedded inside prose). Falls back
 * to the original message whenever the structure is not recognized.
 */
function unwrapErrorMessage(message: string) {
const text = message.replace(/^Error:\s*/, "").trim()
const parse = (value: string) => {
try {
return JSON.parse(value) as unknown
} catch {
return undefined
}
}
// Parse once; if that yields a string, it was double-encoded — parse again.
const read = (value: string) => {
const first = parse(value)
if (typeof first !== "string") return first
return parse(first.trim())
}
let json = read(text)
// Fallback: try the outermost {...} slice embedded in the text.
if (json === undefined) {
const start = text.indexOf("{")
const end = text.lastIndexOf("}")
if (start !== -1 && end > start) json = read(text.slice(start, end + 1))
}
if (!record(json)) return message
// Prefer a nested error object: "type: message", then message/type/code.
const err = record(json.error) ? json.error : undefined
if (err) {
const type = typeof err.type === "string" ? err.type : undefined
const msg = typeof err.message === "string" ? err.message : undefined
if (type && msg) return `${type}: ${msg}`
if (msg) return msg
if (type) return type
const code = typeof err.code === "string" ? err.code : undefined
if (code) return code
}
const msg = typeof json.message === "string" ? json.message : undefined
if (msg) return msg
const reason = typeof json.error === "string" ? json.error : undefined
if (reason) return reason
return message
}
// Type guard for a plain (non-array) object.
function record(value: unknown): value is Record<string, unknown> {
return !!value && typeof value === "object" && !Array.isArray(value)
}
}
/** Extraction of review comments embedded in message parts. */
export namespace MessageComment {
  /** A review comment on a file path, optionally scoped to a line range. */
  export type MessageComment = {
    path: string
    comment: string
    selection?: {
      startLine: number
      endLine: number
    }
  }
  /**
   * Reads a review comment out of a message part, if one is present.
   * Only synthetic text parts can carry a comment; structured metadata
   * takes precedence over parsing the note text. Returns undefined when
   * the part holds no comment.
   */
  export const fromPart = (part: Part): MessageComment | undefined => {
    const eligible = part.type === "text" && part.synthetic
    if (!eligible) return undefined
    const parsed = readCommentMetadata(part.metadata) ?? parseCommentNote(part.text)
    if (!parsed) return undefined
    const selection = parsed.selection
      ? { startLine: parsed.selection.startLine, endLine: parsed.selection.endLine }
      : undefined
    return { path: parsed.path, comment: parsed.comment, selection }
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -11,21 +11,19 @@ export const useSessionHashScroll = (input: {
historyMore: () => boolean
historyLoading: () => boolean
loadMore: (sessionID: string) => Promise<void>
turnStart: () => number
currentMessageId: () => string | undefined
pendingMessage: () => string | undefined
setPendingMessage: (value: string | undefined) => void
setActiveMessage: (message: UserMessage | undefined) => void
setTurnStart: (value: number) => void
autoScroll: { pause: () => void; forceScrollToBottom: () => void }
scroller: () => HTMLDivElement | undefined
anchor: (id: string) => string
revealMessage?: (id: string) => void
scheduleScrollState: (el: HTMLDivElement) => void
consumePendingMessage: (key: string) => string | undefined
}) => {
const visibleUserMessages = createMemo(() => input.visibleUserMessages())
const messageById = createMemo(() => new Map(visibleUserMessages().map((m) => [m.id, m])))
const messageIndex = createMemo(() => new Map(visibleUserMessages().map((m, i) => [m.id, i])))
let pendingKey = ""
let clearing = false
@@ -77,6 +75,7 @@ export const useSessionHashScroll = (input: {
}
const seek = (id: string, behavior: ScrollBehavior, left = 4): boolean => {
input.revealMessage?.(id)
const el = document.getElementById(input.anchor(id))
if (el) return scrollToElement(el, behavior)
if (left <= 0) return false
@@ -89,18 +88,7 @@ export const useSessionHashScroll = (input: {
const scrollToMessage = (message: UserMessage, behavior: ScrollBehavior = "smooth") => {
cancel()
if (input.currentMessageId() !== message.id) input.setActiveMessage(message)
const index = messageIndex().get(message.id) ?? -1
if (index !== -1 && index < input.turnStart()) {
input.setTurnStart(index)
queue(() => {
seek(message.id, behavior)
})
updateHash(message.id)
return
}
input.revealMessage?.(message.id)
if (seek(message.id, behavior)) {
updateHash(message.id)
@@ -154,7 +142,6 @@ export const useSessionHashScroll = (input: {
if (!input.sessionID() || !input.messagesReady()) return
visibleUserMessages()
input.turnStart()
let targetId = input.pendingMessage()
if (!targetId) {

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-app",
"version": "1.14.48",
"version": "1.15.0",
"type": "module",
"license": "MIT",
"scripts": {

View File

@@ -9,8 +9,8 @@ export const config = {
github: {
repoUrl: "https://github.com/anomalyco/opencode",
starsFormatted: {
compact: "150K",
full: "150,000",
compact: "160K",
full: "160,000",
},
},
@@ -22,8 +22,8 @@ export const config = {
// Static stats (used on landing page)
stats: {
contributors: "850",
commits: "11,000",
monthlyUsers: "6.5M",
contributors: "900",
commits: "13,000",
monthlyUsers: "7.5M",
},
} as const

View File

@@ -123,7 +123,7 @@ export async function handler(
? createIpRateLimiter(modelInfo.id, modelInfo.rateLimit, ip, input.request)
: createKeyRateLimiter(modelInfo.id, modelInfo.rateLimit, zenApiKey, input.request)
await rateLimiter?.check()
const stickyTracker = createStickyTracker(modelInfo.stickyProvider, sessionId)
const stickyTracker = createStickyTracker(modelInfo.id, modelInfo.stickyProvider, sessionId)
const stickyProvider = await stickyTracker?.get()
const authInfo = await authenticate(modelInfo, zenApiKey)
const billingSource = validateBilling(authInfo, modelInfo)
@@ -216,7 +216,7 @@ export async function handler(
// ie. 400 error is usually provider error like malformed request
res.status !== 400 &&
// ie. openai 404 error: Item with id 'msg_0ead8b004a3b165d0069436a6b6834819896da85b63b196a3f' not found.
res.status !== 404 &&
!(modelInfo.id.startsWith("gpt-") && res.status === 404) &&
// ie. cannot change codex model providers mid-session
modelInfo.stickyProvider !== "strict" &&
modelInfo.fallbackProvider &&
@@ -238,7 +238,7 @@ export async function handler(
dataDumper?.provideRequest(reqBody)
// Store sticky provider
await stickyTracker?.set(providerInfo.id)
if (res.status === 200) await stickyTracker?.set(providerInfo.id)
// Temporarily change 404 to 400 status code b/c solid start automatically override 404 response
const resStatus = res.status === 404 ? 400 : res.status

View File

@@ -1,16 +1,42 @@
import { Resource } from "@opencode-ai/console-resource"
import { Database, eq } from "@opencode-ai/console-core/drizzle/index.js"
import { ModelStickyProviderTable } from "@opencode-ai/console-core/schema/ip.sql.js"
export function createStickyTracker(stickyProvider: "strict" | "prefer" | undefined, session: string) {
export function createStickyTracker(modelId: string, stickyProvider: "strict" | "prefer" | undefined, session: string) {
if (!stickyProvider) return
if (!session) return
const key = `sticky:${session}`
const id = `${modelId}/${session}`
let _providerId: string | undefined
return {
get: async () => {
return await Resource.GatewayKv.get(key)
const data = await Database.use((tx) =>
tx
.select({
providerId: ModelStickyProviderTable.providerId,
})
.from(ModelStickyProviderTable)
.where(eq(ModelStickyProviderTable.id, id))
.limit(1),
)
_providerId = data[0]?.providerId
return _providerId
},
set: async (providerId: string) => {
await Resource.GatewayKv.put(key, providerId, { expirationTtl: 86400 })
if (_providerId === providerId) return
await Database.use((tx) =>
tx
.insert(ModelStickyProviderTable)
.values({
id,
providerId,
})
.onDuplicateKeyUpdate({
set: {
providerId,
},
}),
)
},
}
}

View File

@@ -0,0 +1,7 @@
-- Persists the last upstream provider that served a given model/session pair
-- so follow-up requests can be routed to the same provider ("sticky" routing).
CREATE TABLE `model_sticky_provider` (
-- Composite identifier built as `<modelId>/<sessionId>` by the gateway.
`id` varchar(255) PRIMARY KEY,
`time_created` timestamp(3) NOT NULL DEFAULT (now()),
-- Automatically bumped by MySQL on every row update.
`time_updated` timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3),
-- Soft-delete marker; NULL while the row is live.
`time_deleted` timestamp(3),
-- Provider id last stored for this model/session.
`provider_id` varchar(255) NOT NULL
);

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/console-core",
"version": "1.14.48",
"version": "1.15.0",
"private": true,
"type": "module",
"license": "MIT",

View File

@@ -51,3 +51,13 @@ export const ModelTpsRateLimitTable = mysqlTable(
},
(table) => [primaryKey({ columns: [table.id, table.interval] })],
)
export const ModelStickyProviderTable = mysqlTable(
"model_sticky_provider",
{
id: varchar("id", { length: 255 }).notNull(),
...timestamps,
providerId: varchar("provider_id", { length: 255 }).notNull(),
},
(table) => [primaryKey({ columns: [table.id] })],
)

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-function",
"version": "1.14.48",
"version": "1.15.0",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View File

@@ -1,17 +1,15 @@
import { and, Database, inArray } from "@opencode-ai/console-core/drizzle/index.js"
import { ModelTpsRateLimitTable } from "@opencode-ai/console-core/schema/ip.sql.js"
type Entry = { provider: string; model: string; tps: number }
type Result = Record<string, { qualify: number; unqualify: number }>
type Result = Record<string, { interval: number; qualify: number; unqualify: number }[]>
export default {
async fetch(request: Request) {
if (request.method !== "POST") return new Response("Method Not Allowed", { status: 405 })
const entries = (await request.json()) as Entry[]
if (!Array.isArray(entries) || entries.length === 0) return Response.json({} satisfies Result)
const ids = entries.map((e) => `${e.provider}/${e.model}/${e.tps}`)
const body = (await request.json()) as { ids: string[] }
const ids = body.ids
if (ids.length === 0) return Response.json({} satisfies Result)
const toInterval = (date: Date) =>
parseInt(
@@ -21,7 +19,7 @@ export default {
.substring(0, 12),
)
const now = Date.now()
const intervals = Array.from({ length: 5 }, (_, i) => toInterval(new Date(now - i * 60 * 1000)))
const intervals = Array.from({ length: 30 }, (_, i) => toInterval(new Date(now - i * 60 * 1000)))
const rows = await Database.use((tx) =>
tx
@@ -30,11 +28,16 @@ export default {
.where(and(inArray(ModelTpsRateLimitTable.id, ids), inArray(ModelTpsRateLimitTable.interval, intervals))),
)
const result: Result = Object.fromEntries(ids.map((id) => [id, { qualify: 0, unqualify: 0 }]))
for (const row of rows) {
result[row.id].qualify += row.qualify
result[row.id].unqualify += row.unqualify
}
const rowsByKey = new Map(rows.map((row) => [`${row.id}:${row.interval}`, row]))
const result: Result = Object.fromEntries(
ids.map((id) => [
id,
intervals.map((interval) => {
const row = rowsByKey.get(`${id}:${interval}`)
return { interval, qualify: row?.qualify ?? 0, unqualify: row?.unqualify ?? 0 }
}),
]),
)
return Response.json(result)
},
}

View File

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-mail",
"version": "1.14.48",
"version": "1.15.0",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",

View File

@@ -1,6 +1,6 @@
{
"$schema": "https://json.schemastore.org/package.json",
"version": "1.14.48",
"version": "1.15.0",
"name": "@opencode-ai/core",
"type": "module",
"license": "MIT",

View File

@@ -5,6 +5,8 @@ import { produce, type Draft } from "immer"
import { ModelV2 } from "./model"
import { PluginV2 } from "./plugin"
import { ProviderV2 } from "./provider"
import { Location } from "./location"
import { EventV2 } from "./event"
type ProviderRecord = {
provider: ProviderV2.Info
@@ -23,6 +25,15 @@ export class ModelNotFoundError extends Schema.TaggedErrorClass<ModelNotFoundErr
modelID: ModelV2.ID,
}) {}
export const Event = {
ModelUpdated: EventV2.define({
type: "catalog.model.updated",
schema: {
model: ModelV2.Info,
},
}),
}
export interface Interface {
readonly provider: {
readonly get: (providerID: ProviderV2.ID) => Effect.Effect<ProviderV2.Info, ProviderNotFoundError>
@@ -56,9 +67,11 @@ export class Service extends Context.Service<Service, Interface>()("@opencode/v2
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
yield* Location.Service
let records = HashMap.empty<ProviderV2.ID, ProviderRecord>()
let defaultModel: { providerID: ProviderV2.ID; modelID: ModelV2.ID } | undefined
const plugin = yield* PluginV2.Service
const events = yield* EventV2.Service
const resolve = (model: ModelV2.Info) => {
const provider = Option.getOrThrow(HashMap.get(records, model.providerID)).provider
@@ -155,14 +168,12 @@ export const layer = Layer.effect(
)
const updated = yield* plugin.trigger("model.update", {}, { model, cancel: false })
if (updated.cancel) return
const next = new ModelV2.Info({ ...updated.model, id: modelID, providerID })
records = HashMap.set(records, providerID, {
provider: record.provider,
models: HashMap.set(
record.models,
modelID,
new ModelV2.Info({ ...updated.model, id: modelID, providerID }),
),
models: HashMap.set(record.models, modelID, next),
})
yield* events.publish(Event.ModelUpdated, { model: resolve(next) })
return
}),
@@ -255,4 +266,4 @@ export const layer = Layer.effect(
const SMALL_MODEL_RE = /\b(nano|flash|lite|mini|haiku|small|fast)\b/
export const defaultLayer = layer.pipe(Layer.provide(PluginV2.defaultLayer))
export const defaultLayer = layer.pipe(Layer.provideMerge(EventV2.defaultLayer), Layer.provide(PluginV2.defaultLayer))

157
packages/core/src/event.ts Normal file
View File

@@ -0,0 +1,157 @@
import { Context, Effect, Layer, Option, PubSub, Schema, Stream } from "effect"
import { Location } from "./location"
import { withStatics } from "./schema"
import { Identifier } from "./util/identifier"
// Branded event id. `create()` mints "evt_" + Identifier.ascending()
// (presumably monotonically increasing — confirm in util/identifier).
export const ID = Schema.String.pipe(
Schema.brand("Event.ID"),
withStatics((schema) => ({ create: () => schema.make("evt_" + Identifier.ascending()) })),
)
export type ID = typeof ID.Type
// Static description of one event type: its literal type tag, optional
// version number, optional aggregate key, and the schema of its data.
export type Definition<Type extends string = string, DataSchema extends Schema.Top = Schema.Top> = {
readonly type: Type
readonly version?: number
readonly aggregate?: string
readonly data: DataSchema
}
// Decoded type of a definition's data schema.
export type Data<D extends Definition> = Schema.Schema.Type<D["data"]>
// A concrete event instance as delivered to subscribers.
export type Payload<D extends Definition = Definition> = {
readonly id: ID
readonly type: D["type"]
readonly data: Data<D>
readonly version?: number
readonly location?: Location.Ref
readonly metadata?: Record<string, unknown>
}
// Handler awaited inline for every published event (see Interface.sync).
export type Sync = (event: Payload) => Effect.Effect<void>
export const registry = new Map<string, Definition>()
export function define<const Type extends string, Fields extends Schema.Struct.Fields>(input: {
readonly type: Type
readonly version?: number
readonly aggregate?: string
readonly schema: Fields
}): Schema.Schema<Payload<Definition<Type, Schema.Struct<Fields>>>> & Definition<Type, Schema.Struct<Fields>> {
const Data = Schema.Struct(input.schema)
const Payload = Schema.Struct({
id: ID,
metadata: Schema.optional(Schema.Record(Schema.String, Schema.Unknown)),
type: Schema.Literal(input.type),
version: Schema.optional(Schema.Number),
location: Schema.optional(Location.Ref),
data: Data,
}).annotate({ identifier: input.type })
const definition = Object.assign(Payload, {
type: input.type,
...(input.version === undefined ? {} : { version: input.version }),
...(input.aggregate === undefined ? {} : { aggregate: input.aggregate }),
data: Data,
})
registry.set(input.type, definition)
return definition as Schema.Schema<Payload<Definition<Type, Schema.Struct<Fields>>>> &
Definition<Type, Schema.Struct<Fields>>
}
/**
 * Snapshot of every event definition registered via `define`, in
 * registration (insertion) order.
 *
 * Uses iterator spread instead of `Iterator.prototype.toArray`: the latter
 * is an ES iterator-helpers method only available on very recent runtimes,
 * while spread is behavior-identical (fresh array, same order) everywhere.
 */
export function definitions() {
  return [...registry.values()]
}
// Extra publish options: a caller-supplied id (instead of a generated one)
// and free-form metadata attached to the payload.
export interface PublishOptions {
readonly id?: ID
readonly metadata?: Record<string, unknown>
}
// Effect that, when run, removes a handler registered via `sync`.
export type Unsubscribe = Effect.Effect<void>
// Public surface of the event service.
export interface Interface {
// Build a payload from definition + data and deliver it.
readonly publish: <D extends Definition>(
definition: D,
data: Data<D>,
options?: PublishOptions,
) => Effect.Effect<Payload<D>>
// Deliver an already-constructed payload as-is.
readonly publishEvent: <D extends Definition>(event: Payload<D>) => Effect.Effect<Payload<D>>
// Stream of payloads matching one definition's type.
readonly subscribe: <D extends Definition>(definition: D) => Stream.Stream<Payload<D>>
// Stream of every payload, regardless of type.
readonly all: () => Stream.Stream<Payload>
// Register a handler awaited on every publish; returns its unsubscriber.
readonly sync: (handler: Sync) => Effect.Effect<Unsubscribe>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/Event") {}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
// Firehose pubsub that receives every published event.
const all = yield* PubSub.unbounded<Payload>()
// Lazily-created per-event-type pubsubs, keyed by the type string.
const typed = new Map<string, PubSub.PubSub<Payload>>()
// Handlers awaited one-by-one before pubsub delivery (see sync below).
const syncHandlers = new Array<Sync>()
// Fetch the pubsub for a definition's type, creating it on first use.
// NOTE(review): get-then-set with no coordination — assumes lookups are not
// raced by concurrent fibers; confirm if subscribe can run concurrently.
const getOrCreate = (definition: Definition) =>
Effect.gen(function* () {
const existing = typed.get(definition.type)
if (existing) return existing
const pubsub = yield* PubSub.unbounded<Payload>()
typed.set(definition.type, pubsub)
return pubsub
})
// Shut down the firehose and every typed pubsub when the layer's scope closes.
yield* Effect.addFinalizer(() =>
Effect.gen(function* () {
yield* PubSub.shutdown(all)
yield* Effect.forEach(typed.values(), PubSub.shutdown, { discard: true })
}),
)
// Deliver an already-built payload: sync handlers first, then the typed
// pubsub (only if some subscriber already created one), then the firehose.
function publishEvent<D extends Definition>(event: Payload<D>) {
return Effect.gen(function* () {
for (const sync of syncHandlers) {
yield* sync(event as Payload)
}
const pubsub = typed.get(event.type)
if (pubsub) yield* PubSub.publish(pubsub, event as Payload)
yield* PubSub.publish(all, event as Payload)
return event
})
}
// Build a payload from a definition + data, stamping id, optional
// metadata/version, and the ambient Location when that service is present.
function publish<D extends Definition>(definition: D, data: Data<D>, options?: PublishOptions) {
return Effect.gen(function* () {
const location = Option.getOrUndefined(yield* Effect.serviceOption(Location.Service))
const event = {
id: options?.id ?? ID.create(),
...(options?.metadata ? { metadata: options.metadata } : {}),
type: definition.type,
...(definition.version === undefined ? {} : { version: definition.version }),
...(location ? { location } : {}),
data,
} as Payload<D>
return yield* publishEvent(event)
})
}
// Stream of payloads for one event type; a plain PubSub, so events published
// before subscription are not replayed.
const subscribe = <D extends Definition>(definition: D): Stream.Stream<Payload<D>> =>
Stream.unwrap(getOrCreate(definition).pipe(Effect.map((pubsub) => Stream.fromPubSub(pubsub)))).pipe(
Stream.map((event) => event as Payload<D>),
)
// Stream of every event of every type.
const streamAll = (): Stream.Stream<Payload> => Stream.fromPubSub(all)
// Register a synchronous handler; the returned effect unregisters it
// (removing the first matching occurrence only).
const sync = (handler: Sync): Effect.Effect<Unsubscribe> =>
Effect.sync(() => {
syncHandlers.push(handler)
return Effect.sync(() => {
const index = syncHandlers.indexOf(handler)
if (index >= 0) syncHandlers.splice(index, 1)
})
})
return Service.of({ publish, publishEvent, subscribe, all: streamAll, sync })
}),
)
export const defaultLayer = layer
export * as EventV2 from "./event"

View File

@@ -5,29 +5,13 @@ function truthy(key: string) {
return value === "true" || value === "1"
}
/** True only when the env var is explicitly set to "false" or "0" (any case). */
function falsy(key: string) {
  const raw = process.env[key]
  if (raw === undefined) return false
  const lowered = raw.toLowerCase()
  return lowered === "false" || lowered === "0"
}
/**
 * Read a positive-integer env var.
 * Returns the parsed value, or undefined when the var is unset, empty,
 * non-numeric, fractional, zero, or negative.
 */
function number(key: string) {
  const raw = process.env[key]
  if (!raw) return undefined
  const parsed = Number(raw)
  if (!Number.isInteger(parsed) || parsed <= 0) return undefined
  return parsed
}
const OPENCODE_EXPERIMENTAL = truthy("OPENCODE_EXPERIMENTAL")
const OPENCODE_DISABLE_CLAUDE_CODE = truthy("OPENCODE_DISABLE_CLAUDE_CODE")
const OPENCODE_DISABLE_CLAUDE_CODE_SKILLS =
OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_SKILLS")
const copy = process.env["OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT"]
export const Flag = {
OTEL_EXPORTER_OTLP_ENDPOINT: process.env["OTEL_EXPORTER_OTLP_ENDPOINT"],
OTEL_EXPORTER_OTLP_HEADERS: process.env["OTEL_EXPORTER_OTLP_HEADERS"],
OPENCODE_AUTO_SHARE: truthy("OPENCODE_AUTO_SHARE"),
OPENCODE_AUTO_HEAP_SNAPSHOT: truthy("OPENCODE_AUTO_HEAP_SNAPSHOT"),
OPENCODE_GIT_BASH_PATH: process.env["OPENCODE_GIT_BASH_PATH"],
OPENCODE_CONFIG: process.env["OPENCODE_CONFIG"],
@@ -38,54 +22,29 @@ export const Flag = {
OPENCODE_DISABLE_TERMINAL_TITLE: truthy("OPENCODE_DISABLE_TERMINAL_TITLE"),
OPENCODE_SHOW_TTFD: truthy("OPENCODE_SHOW_TTFD"),
OPENCODE_PERMISSION: process.env["OPENCODE_PERMISSION"],
OPENCODE_DISABLE_DEFAULT_PLUGINS: truthy("OPENCODE_DISABLE_DEFAULT_PLUGINS"),
OPENCODE_DISABLE_LSP_DOWNLOAD: truthy("OPENCODE_DISABLE_LSP_DOWNLOAD"),
OPENCODE_ENABLE_EXPERIMENTAL_MODELS: truthy("OPENCODE_ENABLE_EXPERIMENTAL_MODELS"),
OPENCODE_DISABLE_AUTOCOMPACT: truthy("OPENCODE_DISABLE_AUTOCOMPACT"),
OPENCODE_DISABLE_MODELS_FETCH: truthy("OPENCODE_DISABLE_MODELS_FETCH"),
OPENCODE_DISABLE_MOUSE: truthy("OPENCODE_DISABLE_MOUSE"),
OPENCODE_DISABLE_CLAUDE_CODE,
OPENCODE_DISABLE_CLAUDE_CODE_PROMPT: OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_PROMPT"),
OPENCODE_DISABLE_CLAUDE_CODE_SKILLS,
OPENCODE_DISABLE_EXTERNAL_SKILLS: truthy("OPENCODE_DISABLE_EXTERNAL_SKILLS"),
OPENCODE_FAKE_VCS: process.env["OPENCODE_FAKE_VCS"],
OPENCODE_SERVER_PASSWORD: process.env["OPENCODE_SERVER_PASSWORD"],
OPENCODE_SERVER_USERNAME: process.env["OPENCODE_SERVER_USERNAME"],
OPENCODE_ENABLE_QUESTION_TOOL: truthy("OPENCODE_ENABLE_QUESTION_TOOL"),
// Experimental
OPENCODE_EXPERIMENTAL,
OPENCODE_EXPERIMENTAL_FILEWATCHER: Config.boolean("OPENCODE_EXPERIMENTAL_FILEWATCHER").pipe(
Config.withDefault(false),
),
OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER: Config.boolean("OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER").pipe(
Config.withDefault(false),
),
OPENCODE_EXPERIMENTAL_ICON_DISCOVERY: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_ICON_DISCOVERY"),
OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT:
copy === undefined ? process.platform === "win32" : truthy("OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT"),
OPENCODE_ENABLE_EXA: truthy("OPENCODE_ENABLE_EXA") || OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_EXA"),
OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS: number("OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS"),
OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX: number("OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX"),
OPENCODE_EXPERIMENTAL_OXFMT: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_OXFMT"),
OPENCODE_EXPERIMENTAL_LSP_TY: truthy("OPENCODE_EXPERIMENTAL_LSP_TY"),
OPENCODE_EXPERIMENTAL_LSP_TOOL: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_LSP_TOOL"),
OPENCODE_EXPERIMENTAL_PLAN_MODE: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE"),
OPENCODE_EXPERIMENTAL_SCOUT: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_SCOUT"),
OPENCODE_EXPERIMENTAL_MARKDOWN: !falsy("OPENCODE_EXPERIMENTAL_MARKDOWN"),
OPENCODE_ENABLE_PARALLEL: truthy("OPENCODE_ENABLE_PARALLEL") || truthy("OPENCODE_EXPERIMENTAL_PARALLEL"),
OPENCODE_EXPERIMENTAL_MINIMAL_THINKING: truthy("OPENCODE_EXPERIMENTAL_MINIMAL_THINKING"),
OPENCODE_MODELS_URL: process.env["OPENCODE_MODELS_URL"],
OPENCODE_MODELS_PATH: process.env["OPENCODE_MODELS_PATH"],
OPENCODE_DISABLE_EMBEDDED_WEB_UI: truthy("OPENCODE_DISABLE_EMBEDDED_WEB_UI"),
OPENCODE_DB: process.env["OPENCODE_DB"],
OPENCODE_DISABLE_CHANNEL_DB: truthy("OPENCODE_DISABLE_CHANNEL_DB"),
OPENCODE_SKIP_MIGRATIONS: truthy("OPENCODE_SKIP_MIGRATIONS"),
OPENCODE_STRICT_CONFIG_DEPS: truthy("OPENCODE_STRICT_CONFIG_DEPS"),
OPENCODE_WORKSPACE_ID: process.env["OPENCODE_WORKSPACE_ID"],
OPENCODE_EXPERIMENTAL_WORKSPACES: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES"),
OPENCODE_EXPERIMENTAL_EVENT_SYSTEM: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_EVENT_SYSTEM"),
OPENCODE_EXPERIMENTAL_SESSION_SWITCHING: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_SESSION_SWITCHING"),
// Evaluated at access time (not module load) because tests, the CLI, and
// external tooling set these env vars at runtime.

View File

@@ -0,0 +1,12 @@
import { Layer, LayerMap } from "effect"
import { Location } from "./location"
import { Catalog } from "./catalog"
import { PluginBoot } from "./plugin/boot"
// Per-location cache of service graphs: each Location.Ref key gets its own
// Catalog + PluginBoot layer stack, built on demand and cached by key.
export class LocationServiceMap extends LayerMap.Service<LocationServiceMap>()("@opencode/example/LocationServiceMap", {
// Build the stack for one location key, pinning Location.Service to `ref`.
lookup: (ref: Location.Ref) => {
const location = Layer.succeed(Location.Service, Location.Service.of(ref))
return Layer.mergeAll(Catalog.defaultLayer, PluginBoot.defaultLayer).pipe(Layer.provide(location))
},
// Idle cache entries are finalized after this TTL.
idleTimeToLive: "5 minutes",
}) {}

View File

@@ -0,0 +1,11 @@
import { Context, Schema } from "effect"
export * as Location from "./location"
// A location reference: a directory path plus an optional workspace id.
// NOTE(review): nothing here enforces the directory being absolute — confirm
// expectations with callers.
export const Ref = Schema.Struct({
directory: Schema.String,
workspaceID: Schema.optional(Schema.String),
}).annotate({ identifier: "Location.Ref" })
export type Ref = typeof Ref.Type
// Context tag carrying the current Location.Ref for the running effect.
export class Service extends Context.Service<Service, Ref>()("@opencode/Location") {}

View File

@@ -0,0 +1,2 @@
// Auto-generated by build.ts - do not edit
export declare const snapshot: Record<string, unknown>

File diff suppressed because one or more lines are too long

View File

@@ -1,14 +1,17 @@
import { Global } from "@opencode-ai/core/global"
import path from "path"
import { Context, Duration, Effect, Layer, Option, Schedule, Schema } from "effect"
import { FetchHttpClient, HttpClient, HttpClientRequest } from "effect/unstable/http"
import { Installation } from "../installation"
import { Flag } from "@opencode-ai/core/flag/flag"
import { Flock } from "@opencode-ai/core/util/flock"
import { Hash } from "@opencode-ai/core/util/hash"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { withTransientReadRetry } from "@/util/effect-http-client"
import { CatalogModelStatus } from "./model-status"
import { Global } from "./global"
import { Flag } from "./flag/flag"
import { Flock } from "./util/flock"
import { Hash } from "./util/hash"
import { AppFileSystem } from "./filesystem"
import { InstallationChannel, InstallationVersion } from "./installation/version"
export const CatalogModelStatus = Schema.Literals(["alpha", "beta", "deprecated"])
export type CatalogModelStatus = typeof CatalogModelStatus.Type
const USER_AGENT = `opencode/${InstallationChannel}/${InstallationVersion}/${Flag.OPENCODE_CLIENT}`
const CostTier = Schema.Struct({
input: Schema.Finite,
@@ -109,11 +112,21 @@ export interface Interface {
export class Service extends Context.Service<Service, Interface>()("@opencode/ModelsDev") {}
export const layer: Layer.Layer<Service, never, AppFileSystem.Service | HttpClient.HttpClient> = Layer.effect(
type Requirements = AppFileSystem.Service | HttpClient.HttpClient
export const layer: Layer.Layer<Service, never, Requirements> = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient))
const http = HttpClient.filterStatusOk(
(yield* HttpClient.HttpClient).pipe(
HttpClient.retryTransient({
retryOn: "errors-and-responses",
times: 2,
schedule: Schedule.exponential(200).pipe(Schedule.jittered),
}),
),
)
const source = Flag.OPENCODE_MODELS_URL || "https://models.dev"
const filepath = path.join(
@@ -132,7 +145,7 @@ export const layer: Layer.Layer<Service, never, AppFileSystem.Service | HttpClie
const fetchApi = Effect.fn("ModelsDev.fetchApi")(function* () {
return yield* HttpClientRequest.get(`${source}/api.json`).pipe(
HttpClientRequest.setHeader("User-Agent", Installation.USER_AGENT),
HttpClientRequest.setHeader("User-Agent", USER_AGENT),
http.execute,
Effect.flatMap((res) => res.text),
Effect.timeout("10 seconds"),

View File

@@ -0,0 +1,71 @@
export * as PluginBoot from "./boot"
import { Context, Deferred, Effect, Layer } from "effect"
import { AuthV2 } from "../auth"
import { Catalog } from "../catalog"
import { Npm } from "../npm"
import { PluginV2 } from "../plugin"
import { AuthPlugin } from "./auth"
import { EnvPlugin } from "./env"
import { ModelsDevPlugin } from "./models-dev"
import { ProviderPlugins } from "./provider"
// One built-in plugin registration: a stable id plus the effect that
// produces its hook functions (may also produce nothing).
type Plugin = {
id: PluginV2.ID
effect: Effect.Effect<PluginV2.HookFunctions | void, never, Catalog.Service | AuthV2.Service | Npm.Service>
}
export interface Interface {
// Resolves once the boot sequence has finished (successfully or not).
readonly wait: () => Effect.Effect<void>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/v2/PluginBoot") {}
export const layer: Layer.Layer<Service, never, Catalog.Service | PluginV2.Service | AuthV2.Service | Npm.Service> =
Layer.effect(
Service,
Effect.gen(function* () {
const catalog = yield* Catalog.Service
const plugin = yield* PluginV2.Service
const auth = yield* AuthV2.Service
const npm = yield* Npm.Service
// Completed with the boot exit once all built-in plugins are registered.
const done = yield* Deferred.make<void>()
// Register one plugin, pre-wiring the services its effect requires.
const add = Effect.fn("PluginBoot.add")(function* (input: Plugin) {
yield* plugin.add({
id: input.id,
effect: input.effect.pipe(
Effect.provideService(Catalog.Service, catalog),
Effect.provideService(AuthV2.Service, auth),
Effect.provideService(Npm.Service, npm),
),
})
})
// Registration order: env, auth, provider plugins, then models.dev.
const boot = Effect.gen(function* () {
yield* add(EnvPlugin)
yield* add(AuthPlugin)
for (const item of ProviderPlugins) {
yield* add(item)
}
yield* add(ModelsDevPlugin)
}).pipe(Effect.withSpan("PluginBoot.boot"))
// Run boot in the background and record its exit (success or failure)
// into `done`, so wait() settles instead of hanging if boot fails.
yield* boot.pipe(
Effect.exit,
Effect.flatMap((exit) => Deferred.done(done, exit)),
Effect.forkScoped,
)
return Service.of({
wait: () => Deferred.await(done),
})
}),
)
export const defaultLayer = layer.pipe(
Layer.provide(Catalog.defaultLayer),
Layer.provide(PluginV2.defaultLayer),
Layer.provide(Layer.orDie(AuthV2.defaultLayer)),
Layer.provide(Npm.defaultLayer),
)

View File

@@ -0,0 +1,94 @@
export * as LayerMapExample from "./layer-map.example"
import { Context, Effect, Layer, LayerMap } from "effect"
import { Npm } from "../npm"
/**
* Tutorial: split global services from context-specific services.
*
* Use this pattern when part of the app should be constructed once at the app edge,
* while another part should be cached per request/project/workspace key.
*
* In this example:
* - Npm.Service is the global service. It is not keyed by request context and should
* be provided once by the application runtime.
* - ConfigService is context-specific. It is built from a RequestContext key and is
* cached by LayerMap for that key.
* - ConfigServiceMap.layer owns the cache. Provide it once globally, then each
* request can provide ConfigServiceMap.get(context) to select the right instance.
*
* Lifetime model:
* - ConfigServiceMap.layer has the app/global lifetime and depends on Npm.Service.
* - ConfigServiceMap.get(context) has the request/context lifetime and provides
* ConfigService for exactly that context key.
* - The cached ConfigService entry stays alive while something is using it. Once idle,
* it remains cached for idleTimeToLive, then its scope is finalized.
* - invalidate(context) removes the cache entry for future lookups. Active users keep
* running on the old instance; the next lookup can create a fresh instance.
*
* Key model:
* - Keys can be strings, structs, classes, arrays, etc.
* - Prefer primitive or immutable keys. Effect uses Hash / Equal semantics for cache
* lookup, so mutating an object after it has been used as a key is a bug.
*/
export type RequestContext = {
readonly directory: string
readonly workspace: string
}
export class RequestContextRef extends Context.Service<RequestContextRef, RequestContext>()(
"@opencode/example/RequestContextRef",
) {}
export interface ConfigServiceShape {
readonly directory: string
readonly workspace: string
readonly nextUse: () => Effect.Effect<number>
readonly which: Npm.Interface["which"]
}
export class ConfigService extends Context.Service<ConfigService, ConfigServiceShape>()(
"@opencode/example/ConfigService",
) {}
// Context-specific layer: reads the RequestContext key plus the global Npm
// service and materializes a ConfigService bound to that key.
const configServiceLayer = Layer.effect(
ConfigService,
Effect.gen(function* () {
const context = yield* RequestContextRef
const npm = yield* Npm.Service
// Per-instance counter; shows that LayerMap caches one instance per key.
let useCount = 0
return ConfigService.of({
directory: context.directory,
workspace: context.workspace,
nextUse: () => Effect.succeed(++useCount),
which: npm.which,
})
}),
)
// Owns the per-key cache of ConfigService instances (see the tutorial comment
// above): provide ConfigServiceMap.layer once globally, then select an
// instance per request with ConfigServiceMap.get(context).
export class ConfigServiceMap extends LayerMap.Service<ConfigServiceMap>()("@opencode/example/ConfigServiceMap", {
// Build the context-bound ConfigService for one RequestContext key.
lookup: (context: RequestContext) =>
configServiceLayer.pipe(Layer.provide(Layer.succeed(RequestContextRef, RequestContextRef.of(context)))),
// Idle cache entries are finalized after this TTL.
idleTimeToLive: "5 minutes",
}) {}
export const appLayer = ConfigServiceMap.layer
// Read the context-bound config and bump its per-instance use counter.
export const readConfig = Effect.fn("LayerMapExample.readConfig")(function* () {
const config = yield* ConfigService
return {
directory: config.directory,
workspace: config.workspace,
useCount: yield* config.nextUse(),
}
})
// Handle one request: select (or build) the cached ConfigService for this
// context key and run readConfig against it.
export const handleRequest = Effect.fn("LayerMapExample.handleRequest")(function* (context: RequestContext) {
return yield* readConfig().pipe(Effect.provide(ConfigServiceMap.get(context)))
})
// Drop the cache entry for `context`. Active users keep the old instance;
// the next lookup builds a fresh one.
export const invalidateContext = (context: RequestContext) => ConfigServiceMap.invalidate(context)

View File

@@ -1,9 +1,9 @@
import { DateTime, Effect } from "effect"
import { Catalog } from "@opencode-ai/core/catalog"
import { ModelV2 } from "@opencode-ai/core/model"
import { ProviderV2 } from "@opencode-ai/core/provider"
import { ModelsDev } from "@/provider/models"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { Catalog } from "../catalog"
import { ModelV2 } from "../model"
import { ModelsDev } from "../models"
import { PluginV2 } from "../plugin"
import { ProviderV2 } from "../provider"
function released(date: string) {
const time = Date.parse(date)

View File

@@ -10,6 +10,7 @@ export const NvidiaPlugin = PluginV2.define({
if (evt.provider.id !== ProviderV2.ID.make("nvidia")) return
evt.provider.options.headers["HTTP-Referer"] = "https://opencode.ai/"
evt.provider.options.headers["X-Title"] = "opencode"
evt.provider.options.headers["X-BILLING-INVOKE-ORIGIN"] ??= "OpenCode"
}),
}
}),

View File

@@ -31,7 +31,8 @@ export interface RunResult {
readonly exitCode: number
readonly stdout: Buffer
readonly stderr: Buffer
readonly truncated: boolean
readonly stdoutTruncated: boolean
readonly stderrTruncated: boolean
}
export type Interface = ChildProcessSpawner["Service"] & {
@@ -147,7 +148,8 @@ export const layer = Layer.effect(
exitCode,
stdout: stdout.buffer,
stderr: stderr.buffer,
truncated: stdout.truncated,
stdoutTruncated: stdout.truncated,
stderrTruncated: stderr.truncated,
} satisfies RunResult
}),
)

View File

@@ -1,12 +1,13 @@
import { SessionID } from "@/session/schema"
import { NonNegativeInt } from "@opencode-ai/core/schema"
import { EventV2 } from "./event"
import { FileAttachment, Prompt } from "@opencode-ai/core/session-prompt"
import { Schema } from "effect"
import { EventV2 } from "./event"
import { ModelV2 } from "./model"
import { NonNegativeInt } from "./schema"
import { Session } from "./session"
import { FileAttachment, Prompt } from "./session-prompt"
import { ToolOutput } from "./tool-output"
import { V2Schema } from "./v2-schema"
export { FileAttachment }
import { ToolOutput } from "@opencode-ai/core/tool-output"
import { V2Schema } from "@opencode-ai/core/v2-schema"
import { ModelV2 } from "@opencode-ai/core/model"
export const Source = Schema.Struct({
start: NonNegativeInt,
@@ -15,92 +16,94 @@ export const Source = Schema.Struct({
}).annotate({
identifier: "session.next.event.source",
})
export type Source = Schema.Schema.Type<typeof Source>
export type Source = typeof Source.Type
const Base = {
timestamp: V2Schema.DateTimeUtcFromMillis,
sessionID: SessionID,
sessionID: Session.ID,
}
const options = {
aggregate: "sessionID",
version: 1,
} as const
export const UnknownError = Schema.Struct({
type: Schema.Literal("unknown"),
message: Schema.String,
}).annotate({
identifier: "Session.Error.Unknown",
})
export type UnknownError = Schema.Schema.Type<typeof UnknownError>
export type UnknownError = typeof UnknownError.Type
export const AgentSwitched = EventV2.define({
type: "session.next.agent.switched",
aggregate: "sessionID",
version: 1,
...options,
schema: {
...Base,
agent: Schema.String,
},
})
export type AgentSwitched = Schema.Schema.Type<typeof AgentSwitched>
export type AgentSwitched = typeof AgentSwitched.Type
export const ModelSwitched = EventV2.define({
type: "session.next.model.switched",
aggregate: "sessionID",
version: 1,
...options,
schema: {
...Base,
model: ModelV2.Ref,
},
})
export type ModelSwitched = Schema.Schema.Type<typeof ModelSwitched>
export type ModelSwitched = typeof ModelSwitched.Type
export const Prompted = EventV2.define({
type: "session.next.prompted",
aggregate: "sessionID",
version: 1,
...options,
schema: {
...Base,
prompt: Prompt,
},
})
export type Prompted = Schema.Schema.Type<typeof Prompted>
export type Prompted = typeof Prompted.Type
export const Synthetic = EventV2.define({
type: "session.next.synthetic",
aggregate: "sessionID",
...options,
schema: {
...Base,
text: Schema.String,
},
})
export type Synthetic = Schema.Schema.Type<typeof Synthetic>
export type Synthetic = typeof Synthetic.Type
export namespace Shell {
export const Started = EventV2.define({
type: "session.next.shell.started",
aggregate: "sessionID",
...options,
schema: {
...Base,
callID: Schema.String,
command: Schema.String,
},
})
export type Started = Schema.Schema.Type<typeof Started>
export type Started = typeof Started.Type
export const Ended = EventV2.define({
type: "session.next.shell.ended",
aggregate: "sessionID",
...options,
schema: {
...Base,
callID: Schema.String,
output: Schema.String,
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
export type Ended = typeof Ended.Type
}
export namespace Step {
export const Started = EventV2.define({
type: "session.next.step.started",
aggregate: "sessionID",
...options,
schema: {
...Base,
agent: Schema.String,
@@ -108,11 +111,11 @@ export namespace Step {
snapshot: Schema.String.pipe(Schema.optional),
},
})
export type Started = Schema.Schema.Type<typeof Started>
export type Started = typeof Started.Type
export const Ended = EventV2.define({
type: "session.next.step.ended",
aggregate: "sessionID",
...options,
schema: {
...Base,
finish: Schema.String,
@@ -129,123 +132,123 @@ export namespace Step {
snapshot: Schema.String.pipe(Schema.optional),
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
export type Ended = typeof Ended.Type
export const Failed = EventV2.define({
type: "session.next.step.failed",
aggregate: "sessionID",
...options,
schema: {
...Base,
error: UnknownError,
},
})
export type Failed = Schema.Schema.Type<typeof Failed>
export type Failed = typeof Failed.Type
}
export namespace Text {
export const Started = EventV2.define({
type: "session.next.text.started",
aggregate: "sessionID",
...options,
schema: {
...Base,
},
})
export type Started = Schema.Schema.Type<typeof Started>
export type Started = typeof Started.Type
export const Delta = EventV2.define({
type: "session.next.text.delta",
aggregate: "sessionID",
...options,
schema: {
...Base,
delta: Schema.String,
},
})
export type Delta = Schema.Schema.Type<typeof Delta>
export type Delta = typeof Delta.Type
export const Ended = EventV2.define({
type: "session.next.text.ended",
aggregate: "sessionID",
...options,
schema: {
...Base,
text: Schema.String,
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
export type Ended = typeof Ended.Type
}
export namespace Reasoning {
export const Started = EventV2.define({
type: "session.next.reasoning.started",
aggregate: "sessionID",
...options,
schema: {
...Base,
reasoningID: Schema.String,
},
})
export type Started = Schema.Schema.Type<typeof Started>
export type Started = typeof Started.Type
export const Delta = EventV2.define({
type: "session.next.reasoning.delta",
aggregate: "sessionID",
...options,
schema: {
...Base,
reasoningID: Schema.String,
delta: Schema.String,
},
})
export type Delta = Schema.Schema.Type<typeof Delta>
export type Delta = typeof Delta.Type
export const Ended = EventV2.define({
type: "session.next.reasoning.ended",
aggregate: "sessionID",
...options,
schema: {
...Base,
reasoningID: Schema.String,
text: Schema.String,
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
export type Ended = typeof Ended.Type
}
export namespace Tool {
export namespace Input {
export const Started = EventV2.define({
type: "session.next.tool.input.started",
aggregate: "sessionID",
...options,
schema: {
...Base,
callID: Schema.String,
name: Schema.String,
},
})
export type Started = Schema.Schema.Type<typeof Started>
export type Started = typeof Started.Type
export const Delta = EventV2.define({
type: "session.next.tool.input.delta",
aggregate: "sessionID",
...options,
schema: {
...Base,
callID: Schema.String,
delta: Schema.String,
},
})
export type Delta = Schema.Schema.Type<typeof Delta>
export type Delta = typeof Delta.Type
export const Ended = EventV2.define({
type: "session.next.tool.input.ended",
aggregate: "sessionID",
...options,
schema: {
...Base,
callID: Schema.String,
text: Schema.String,
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
export type Ended = typeof Ended.Type
}
export const Called = EventV2.define({
type: "session.next.tool.called",
aggregate: "sessionID",
...options,
schema: {
...Base,
callID: Schema.String,
@@ -257,11 +260,11 @@ export namespace Tool {
}),
},
})
export type Called = Schema.Schema.Type<typeof Called>
export type Called = typeof Called.Type
export const Progress = EventV2.define({
type: "session.next.tool.progress",
aggregate: "sessionID",
...options,
schema: {
...Base,
callID: Schema.String,
@@ -269,11 +272,11 @@ export namespace Tool {
content: Schema.Array(ToolOutput.Content),
},
})
export type Progress = Schema.Schema.Type<typeof Progress>
export type Progress = typeof Progress.Type
export const Success = EventV2.define({
type: "session.next.tool.success",
aggregate: "sessionID",
...options,
schema: {
...Base,
callID: Schema.String,
@@ -285,11 +288,11 @@ export namespace Tool {
}),
},
})
export type Success = Schema.Schema.Type<typeof Success>
export type Success = typeof Success.Type
export const Failed = EventV2.define({
type: "session.next.tool.failed",
aggregate: "sessionID",
...options,
schema: {
...Base,
callID: Schema.String,
@@ -300,7 +303,7 @@ export namespace Tool {
}),
},
})
export type Failed = Schema.Schema.Type<typeof Failed>
export type Failed = typeof Failed.Type
}
export const RetryError = Schema.Struct({
@@ -313,49 +316,50 @@ export const RetryError = Schema.Struct({
}).annotate({
identifier: "session.next.retry_error",
})
export type RetryError = Schema.Schema.Type<typeof RetryError>
export type RetryError = typeof RetryError.Type
export const Retried = EventV2.define({
type: "session.next.retried",
aggregate: "sessionID",
...options,
schema: {
...Base,
attempt: Schema.Finite,
error: RetryError,
},
})
export type Retried = Schema.Schema.Type<typeof Retried>
export type Retried = typeof Retried.Type
export namespace Compaction {
export const Started = EventV2.define({
type: "session.next.compaction.started",
aggregate: "sessionID",
...options,
schema: {
...Base,
reason: Schema.Union([Schema.Literal("auto"), Schema.Literal("manual")]),
},
})
export type Started = Schema.Schema.Type<typeof Started>
export type Started = typeof Started.Type
export const Delta = EventV2.define({
type: "session.next.compaction.delta",
aggregate: "sessionID",
...options,
schema: {
...Base,
text: Schema.String,
},
})
export type Delta = typeof Delta.Type
export const Ended = EventV2.define({
type: "session.next.compaction.ended",
aggregate: "sessionID",
...options,
schema: {
...Base,
text: Schema.String,
include: Schema.String.pipe(Schema.optional),
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
export type Ended = typeof Ended.Type
}
export const All = Schema.Union(
@@ -392,16 +396,7 @@ export const All = Schema.Union(
},
).pipe(Schema.toTaggedUnion("type"))
// user
// assistant
// assistant
// assistant
// user
// compaction marker
// -> text
// assistant
export type Event = Schema.Schema.Type<typeof All>
export type Event = typeof All.Type
export type Type = Event["type"]
export * as SessionEvent from "./session-event"

View File

@@ -1,10 +1,10 @@
import { Schema } from "effect"
import { Prompt } from "@opencode-ai/core/session-prompt"
import { Prompt } from "./session-prompt"
import { SessionEvent } from "./session-event"
import { EventV2 } from "./event"
import { ToolOutput } from "@opencode-ai/core/tool-output"
import { V2Schema } from "@opencode-ai/core/v2-schema"
import { ModelV2 } from "@opencode-ai/core/model"
import { ToolOutput } from "./tool-output"
import { V2Schema } from "./v2-schema"
import { ModelV2 } from "./model"
export const ID = EventV2.ID
export type ID = Schema.Schema.Type<typeof ID>
@@ -20,7 +20,7 @@ const Base = {
export class AgentSwitched extends Schema.Class<AgentSwitched>("Session.Message.AgentSwitched")({
...Base,
type: Schema.Literal("agent-switched"),
agent: SessionEvent.AgentSwitched.fields.data.fields.agent,
agent: SessionEvent.AgentSwitched.data.fields.agent,
}) {}
export class ModelSwitched extends Schema.Class<ModelSwitched>("Session.Message.ModelSwitched")({
@@ -43,16 +43,16 @@ export class User extends Schema.Class<User>("Session.Message.User")({
export class Synthetic extends Schema.Class<Synthetic>("Session.Message.Synthetic")({
...Base,
sessionID: SessionEvent.Synthetic.fields.data.fields.sessionID,
text: SessionEvent.Synthetic.fields.data.fields.text,
sessionID: SessionEvent.Synthetic.data.fields.sessionID,
text: SessionEvent.Synthetic.data.fields.text,
type: Schema.Literal("synthetic"),
}) {}
export class Shell extends Schema.Class<Shell>("Session.Message.Shell")({
...Base,
type: Schema.Literal("shell"),
callID: SessionEvent.Shell.Started.fields.data.fields.callID,
command: SessionEvent.Shell.Started.fields.data.fields.command,
callID: SessionEvent.Shell.Started.data.fields.callID,
command: SessionEvent.Shell.Started.data.fields.command,
output: Schema.String,
time: Schema.Struct({
created: V2Schema.DateTimeUtcFromMillis,
@@ -130,7 +130,7 @@ export class Assistant extends Schema.Class<Assistant>("Session.Message.Assistan
...Base,
type: Schema.Literal("assistant"),
agent: Schema.String,
model: SessionEvent.Step.Started.fields.data.fields.model,
model: SessionEvent.Step.Started.data.fields.model,
content: AssistantContent.pipe(Schema.Array),
snapshot: Schema.Struct({
start: Schema.String.pipe(Schema.optional),
@@ -147,7 +147,7 @@ export class Assistant extends Schema.Class<Assistant>("Session.Message.Assistan
write: Schema.Finite,
}),
}).pipe(Schema.optional),
error: SessionEvent.Step.Failed.fields.data.fields.error.pipe(Schema.optional),
error: SessionEvent.Step.Failed.data.fields.error.pipe(Schema.optional),
time: Schema.Struct({
created: V2Schema.DateTimeUtcFromMillis,
completed: V2Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
@@ -156,7 +156,7 @@ export class Assistant extends Schema.Class<Assistant>("Session.Message.Assistan
export class Compaction extends Schema.Class<Compaction>("Session.Message.Compaction")({
type: Schema.Literal("compaction"),
reason: SessionEvent.Compaction.Started.fields.data.fields.reason,
reason: SessionEvent.Compaction.Started.data.fields.reason,
summary: Schema.String,
include: Schema.String.pipe(Schema.optional),
...Base,

View File

@@ -0,0 +1,13 @@
export * as Session from "./session"
import { Schema } from "effect"
import { withStatics } from "./schema"
import { Identifier } from "./util/identifier"
export const ID = Schema.String.check(Schema.isStartsWith("ses")).pipe(
Schema.brand("SessionID"),
withStatics((schema) => ({
descending: (id?: string) => schema.make(id ?? "ses_" + Identifier.descending()),
})),
)
export type ID = typeof ID.Type

View File

@@ -20,6 +20,7 @@ const levelPriority: Record<Level, number> = {
ERROR: 3,
}
const keep = 10
const initializedRunID = "OPENCODE_LOG_INITIALIZED_RUN_ID"
let level: Level = "INFO"
@@ -70,7 +71,10 @@ export async function init(options: Options) {
Global.Path.log,
options.dev ? "dev.log" : new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log",
)
await fs.truncate(logpath).catch(() => {})
const runID = process.env.OPENCODE_RUN_ID
const shouldTruncate = !options.dev || !runID || process.env[initializedRunID] !== runID
if (shouldTruncate) await fs.truncate(logpath).catch(() => {})
if (options.dev && runID) process.env[initializedRunID] = runID
const stream = createWriteStream(logpath, { flags: "a" })
write = async (msg: any) => {
return new Promise((resolve, reject) => {

View File

@@ -1,12 +1,21 @@
import { describe, expect } from "bun:test"
import { DateTime, Effect, Layer, Option } from "effect"
import { DateTime, Effect, Fiber, Layer, Option, Stream } from "effect"
import { Catalog } from "@opencode-ai/core/catalog"
import { EventV2 } from "@opencode-ai/core/event"
import { Location } from "@opencode-ai/core/location"
import { ModelV2 } from "@opencode-ai/core/model"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { ProviderV2 } from "@opencode-ai/core/provider"
import { testEffect } from "../lib/effect"
import { testEffect } from "./lib/effect"
const it = testEffect(Catalog.layer.pipe(Layer.provideMerge(PluginV2.defaultLayer)))
const locationLayer = Layer.succeed(Location.Service, Location.Service.of({ directory: "test" }))
const it = testEffect(
Catalog.layer.pipe(
Layer.provideMerge(EventV2.defaultLayer),
Layer.provideMerge(PluginV2.defaultLayer),
Layer.provideMerge(locationLayer),
),
)
describe("CatalogV2", () => {
it.effect("normalizes provider baseURL into endpoint url", () =>
@@ -67,6 +76,31 @@ describe("CatalogV2", () => {
}),
)
it.effect("publishes model updated events", () =>
Effect.gen(function* () {
const catalog = yield* Catalog.Service
const events = yield* EventV2.Service
const providerID = ProviderV2.ID.make("test")
const modelID = ModelV2.ID.make("model")
const fiber = yield* events
.subscribe(Catalog.Event.ModelUpdated)
.pipe(Stream.take(1), Stream.runCollect, Effect.forkScoped)
yield* Effect.yieldNow
yield* catalog.provider.update(providerID, () => {})
yield* catalog.model.update(providerID, modelID, (model) => {
model.name = "Updated Model"
})
const event = Array.from(yield* Fiber.join(fiber))[0]
expect(event?.type).toBe("catalog.model.updated")
expect(event?.data.model.providerID).toBe(providerID)
expect(event?.data.model.id).toBe(modelID)
expect(event?.data.model.name).toBe("Updated Model")
expect(event?.location).toEqual({ directory: "test" })
}),
)
it.effect("resolves unknown model endpoint from provider endpoint", () =>
Effect.gen(function* () {
const catalog = yield* Catalog.Service

View File

@@ -0,0 +1,132 @@
import { describe, expect } from "bun:test"
import { Effect, Fiber, Layer, Schema, Stream } from "effect"
import { EventV2 } from "@opencode-ai/core/event"
import { Location } from "@opencode-ai/core/location"
import { testEffect } from "./lib/effect"
const locationLayer = Layer.succeed(
Location.Service,
Location.Service.of({ directory: "project", workspaceID: "workspace" }),
)
const it = testEffect(EventV2.layer.pipe(Layer.provideMerge(locationLayer)))
const itWithoutLocation = testEffect(EventV2.layer)
const Message = EventV2.define({
type: "test.message",
schema: {
text: Schema.String,
},
})
const GlobalMessage = EventV2.define({
type: "test.global",
schema: {
text: Schema.String,
},
})
const VersionedMessage = EventV2.define({
type: "test.versioned",
version: 2,
schema: {
text: Schema.String,
},
})
describe("EventV2", () => {
it.effect("publishes events with the current location", () =>
Effect.gen(function* () {
const events = yield* EventV2.Service
const fiber = yield* events.subscribe(Message).pipe(Stream.take(1), Stream.runCollect, Effect.forkScoped)
yield* Effect.yieldNow
const event = yield* events.publish(Message, { text: "hello" })
const received = Array.from(yield* Fiber.join(fiber))
expect(received).toEqual([event])
expect(event.type).toBe("test.message")
expect(event).not.toHaveProperty("version")
expect(event.data).toEqual({ text: "hello" })
expect(event.location).toEqual({ directory: "project", workspaceID: "workspace" })
}),
)
itWithoutLocation.effect("omits location when no location is available", () =>
Effect.gen(function* () {
const events = yield* EventV2.Service
const event = yield* events.publish(GlobalMessage, { text: "hello" })
expect(event).not.toHaveProperty("location")
expect(event.type).toBe("test.global")
}),
)
it.effect("publishes definition version", () =>
Effect.gen(function* () {
const events = yield* EventV2.Service
const event = yield* events.publish(VersionedMessage, { text: "hello" })
expect(event.type).toBe("test.versioned")
expect(event.version).toBe(2)
}),
)
it.effect("stores definitions in the exported registry", () =>
Effect.sync(() => {
expect(EventV2.registry.get(Message.type)).toBe(Message)
}),
)
it.effect("publishes to typed and wildcard subscriptions", () =>
Effect.gen(function* () {
const events = yield* EventV2.Service
const typed = yield* events.subscribe(Message).pipe(Stream.take(1), Stream.runCollect, Effect.forkScoped)
const wildcard = yield* events.all().pipe(Stream.take(1), Stream.runCollect, Effect.forkScoped)
yield* Effect.yieldNow
const event = yield* events.publish(Message, { text: "hello" })
expect(Array.from(yield* Fiber.join(typed))).toEqual([event])
expect(Array.from(yield* Fiber.join(wildcard))).toEqual([event])
}),
)
it.effect("runs sync handlers inline", () =>
Effect.gen(function* () {
const events = yield* EventV2.Service
const received = new Array<EventV2.Payload>()
const unsubscribe = yield* events.sync((event) =>
Effect.sync(() => {
received.push(event)
}),
)
const event = yield* events.publish(Message, { text: "hello" })
yield* unsubscribe
yield* events.publish(Message, { text: "after unsubscribe" })
expect(received).toEqual([event])
}),
)
it.effect("runs sync handlers before publishing to streams", () =>
Effect.gen(function* () {
const events = yield* EventV2.Service
const received = new Array<string>()
const fiber = yield* events.all().pipe(
Stream.take(1),
Stream.runForEach(() => Effect.sync(() => received.push("stream"))),
Effect.forkScoped,
)
yield* events.sync((event) =>
Effect.sync(() => {
received.push(event.type)
}),
)
yield* Effect.yieldNow
yield* events.publish(Message, { text: "hello" })
yield* Fiber.join(fiber)
expect(received).toEqual([Message.type, "stream"])
}),
)
})

View File

@@ -4,8 +4,8 @@ import { HttpClient, HttpClientResponse } from "effect/unstable/http"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Flag } from "@opencode-ai/core/flag/flag"
import { Global } from "@opencode-ai/core/global"
import { ModelsDev } from "../../src/provider/models"
import { it } from "../lib/effect"
import { ModelsDev } from "@opencode-ai/core/models"
import { it } from "./lib/effect"
import { rm, writeFile, utimes, mkdir } from "fs/promises"
import path from "path"
@@ -70,13 +70,16 @@ const fixture2: Record<string, ModelsDev.Provider> = {
interface MockState {
body: string
status: number
calls: Array<{ url: string }>
calls: Array<{ url: string; userAgent: string | null }>
}
const makeMockClient = (state: Ref.Ref<MockState>) =>
HttpClient.make((request) =>
Effect.gen(function* () {
yield* Ref.update(state, (s) => ({ ...s, calls: [...s.calls, { url: request.url }] }))
yield* Ref.update(state, (s) => ({
...s,
calls: [...s.calls, { url: request.url, userAgent: request.headers["user-agent"] ?? null }],
}))
const s = yield* Ref.get(state)
return HttpClientResponse.fromWeb(request, new Response(s.body, { status: s.status }))
}),
@@ -133,14 +136,14 @@ describe("ModelsDev Service", () => {
}),
)
it.live("get() returns {} when disk empty and fetch disabled", () =>
it.live("get() returns bundled snapshot when disk empty and fetch disabled", () =>
Effect.gen(function* () {
const state = yield* Ref.make(initialState)
const result = yield* provided(
state,
ModelsDev.Service.use((s) => s.get()),
)
expect(result).toEqual({})
expect(Object.keys(result).length).toBeGreaterThan(0)
const final = yield* Ref.get(state)
expect(final.calls).toEqual([])
}),
@@ -202,6 +205,7 @@ describe("ModelsDev Service", () => {
const final = yield* Ref.get(state)
expect(final.calls.length).toBe(1)
expect(final.calls[0].url).toContain("/api.json")
expect(final.calls[0].userAgent).toContain("/cli")
}),
)
@@ -251,7 +255,7 @@ describe("ModelsDev Service", () => {
}),
)
expect(result).toEqual(fixture)
// withTransientReadRetry retries 5xx, so calls may be > 1.
// retryTransient retries 5xx, so calls may be > 1.
const final = yield* Ref.get(state)
expect(final.calls.length).toBeGreaterThanOrEqual(1)
}),

View File

@@ -4,7 +4,7 @@ import { AuthV2 } from "@opencode-ai/core/auth"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { AuthPlugin } from "@opencode-ai/core/plugin/auth"
import { AzurePlugin } from "@opencode-ai/core/plugin/provider/azure"
import { testEffect } from "../../lib/effect"
import { testEffect } from "../lib/effect"
import { fakeSelectorSdk, it, model, npmLayer, provider, withEnv } from "./provider-helper"
const itWithAuth = testEffect(Layer.mergeAll(PluginV2.defaultLayer, AuthV2.defaultLayer, npmLayer))

View File

@@ -5,7 +5,7 @@ import { ModelV2 } from "@opencode-ai/core/model"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { AuthPlugin } from "@opencode-ai/core/plugin/auth"
import { CloudflareWorkersAIPlugin } from "@opencode-ai/core/plugin/provider/cloudflare-workers-ai"
import { testEffect } from "../../lib/effect"
import { testEffect } from "../lib/effect"
import { fakeSelectorSdk, it, model, npmLayer, provider, withEnv } from "./provider-helper"
const itWithAuth = testEffect(Layer.mergeAll(PluginV2.defaultLayer, AuthV2.defaultLayer, npmLayer))

View File

@@ -3,7 +3,7 @@ import { Effect, Layer } from "effect"
import { AISDK } from "@opencode-ai/core/aisdk"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { DeepInfraPlugin } from "@opencode-ai/core/plugin/provider/deepinfra"
import { testEffect } from "../../lib/effect"
import { testEffect } from "../lib/effect"
import { it, model } from "./provider-helper"
const itAISDK = testEffect(Layer.provideMerge(AISDK.layer, PluginV2.defaultLayer))

View File

@@ -9,7 +9,7 @@ import { AISDK } from "@opencode-ai/core/aisdk"
import { ModelV2 } from "@opencode-ai/core/model"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { DynamicProviderPlugin } from "@opencode-ai/core/plugin/provider/dynamic"
import { testEffect } from "../../lib/effect"
import { testEffect } from "../lib/effect"
import { fixtureProvider, it, model, npmLayer } from "./provider-helper"
const fixtureProviderPath = fileURLToPath(fixtureProvider)

View File

@@ -3,7 +3,7 @@ import { Effect } from "effect"
import { ModelV2 } from "@opencode-ai/core/model"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { GithubCopilotPlugin } from "@opencode-ai/core/plugin/provider/github-copilot"
import { fakeSelectorSdk, it, model } from "../v2/plugin/provider-helper"
import { fakeSelectorSdk, it, model } from "./provider-helper"
describe("GithubCopilotPlugin", () => {
it.effect("creates the bundled Copilot SDK for the GitHub Copilot package", () =>

View File

@@ -4,7 +4,7 @@ import { AuthV2 } from "@opencode-ai/core/auth"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { AuthPlugin } from "@opencode-ai/core/plugin/auth"
import { GitLabPlugin } from "@opencode-ai/core/plugin/provider/gitlab"
import { testEffect } from "../../lib/effect"
import { testEffect } from "../lib/effect"
import { it, model, npmLayer, provider, withEnv } from "./provider-helper"
const gitlabSDKOptions: Record<string, unknown>[] = []

View File

@@ -4,7 +4,7 @@ import { AISDK } from "@opencode-ai/core/aisdk"
import { ModelV2 } from "@opencode-ai/core/model"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { GooglePlugin } from "@opencode-ai/core/plugin/provider/google"
import { testEffect } from "../../lib/effect"
import { testEffect } from "../lib/effect"
import { it, model } from "./provider-helper"
const itWithAISDK = testEffect(AISDK.layer.pipe(Layer.provideMerge(PluginV2.defaultLayer)))

View File

@@ -6,7 +6,7 @@ import { ModelV2 } from "@opencode-ai/core/model"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { GroqPlugin } from "@opencode-ai/core/plugin/provider/groq"
import { it, model } from "./provider-helper"
import { testEffect } from "../../lib/effect"
import { testEffect } from "../lib/effect"
const aisdkIt = testEffect(AISDK.layer.pipe(Layer.provideMerge(PluginV2.defaultLayer)))

View File

@@ -5,7 +5,7 @@ import { Effect, Layer, Option } from "effect"
import { ModelV2 } from "@opencode-ai/core/model"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { ProviderV2 } from "@opencode-ai/core/provider"
import { testEffect } from "../../lib/effect"
import { testEffect } from "../lib/effect"
export const fixtureProvider = new URL("./fixtures/provider-factory.ts", import.meta.url).href

View File

@@ -0,0 +1,93 @@
import { describe, expect } from "bun:test"
import { Effect } from "effect"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { ProviderPlugins } from "@opencode-ai/core/plugin/provider"
import { NvidiaPlugin } from "@opencode-ai/core/plugin/provider/nvidia"
import { expectPluginRegistered, it, provider } from "./provider-helper"
describe("NvidiaPlugin", () => {
it.effect("is registered so legacy referer headers can be applied", () =>
Effect.sync(() =>
expectPluginRegistered(
ProviderPlugins.map((item) => item.id),
"nvidia",
),
),
)
it.effect("applies NVIDIA tracking headers only to nvidia", () =>
Effect.gen(function* () {
const plugin = yield* PluginV2.Service
yield* plugin.add(NvidiaPlugin)
const result = yield* plugin.trigger(
"provider.update",
{},
{
provider: provider("nvidia", {
options: { headers: { Existing: "value" }, body: {}, aisdk: { provider: {}, request: {} } },
}),
cancel: false,
},
)
const ignored = yield* plugin.trigger("provider.update", {}, { provider: provider("openrouter"), cancel: false })
expect(result.provider.options.headers).toEqual({
Existing: "value",
"HTTP-Referer": "https://opencode.ai/",
"X-Title": "opencode",
"X-BILLING-INVOKE-ORIGIN": "OpenCode",
})
expect(ignored.provider.options.headers).toEqual({})
}),
)
it.effect("adds billing origin for custom NVIDIA endpoints", () =>
Effect.gen(function* () {
const plugin = yield* PluginV2.Service
yield* plugin.add(NvidiaPlugin)
const result = yield* plugin.trigger(
"provider.update",
{},
{
provider: provider("nvidia", {
endpoint: { type: "aisdk", package: "test-provider", url: "http://localhost:8000/v1" },
options: { headers: {}, body: {}, aisdk: { provider: {}, request: {} } },
}),
cancel: false,
},
)
expect(result.provider.options.headers).toEqual({
"HTTP-Referer": "https://opencode.ai/",
"X-Title": "opencode",
"X-BILLING-INVOKE-ORIGIN": "OpenCode",
})
}),
)
it.effect("preserves an explicit NVIDIA billing origin header", () =>
Effect.gen(function* () {
const plugin = yield* PluginV2.Service
yield* plugin.add(NvidiaPlugin)
const result = yield* plugin.trigger(
"provider.update",
{},
{
provider: provider("nvidia", {
options: {
headers: { "X-BILLING-INVOKE-ORIGIN": "CustomOrigin" },
body: {},
aisdk: { provider: { baseURL: "https://integrate.api.nvidia.com/v1" }, request: {} },
},
}),
cancel: false,
},
)
expect(result.provider.options.headers).toEqual({
"HTTP-Referer": "https://opencode.ai/",
"X-Title": "opencode",
"X-BILLING-INVOKE-ORIGIN": "CustomOrigin",
})
}),
)
})

View File

@@ -1,6 +1,7 @@
import { describe, expect } from "bun:test"
import { DateTime, Effect, Option } from "effect"
import { DateTime, Effect, Layer, Option } from "effect"
import { Catalog } from "@opencode-ai/core/catalog"
import { Location } from "@opencode-ai/core/location"
import { ModelV2 } from "@opencode-ai/core/model"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { OpencodePlugin } from "@opencode-ai/core/plugin/provider/opencode"
@@ -8,6 +9,7 @@ import { ProviderV2 } from "@opencode-ai/core/provider"
import { it, model, provider, withEnv } from "./provider-helper"
const cost = (input: number, output = 0) => [{ input, output, cache: { read: 0, write: 0 } }]
const locationLayer = Layer.succeed(Location.Service, Location.Service.of({ directory: "test" }))
describe("OpencodePlugin", () => {
it.effect("uses a public key and cancels paid models without credentials", () =>
@@ -190,6 +192,6 @@ describe("OpencodePlugin", () => {
const selected = yield* catalog.model.small(providerID)
expect(Option.getOrUndefined(selected)?.id).toBe(ModelV2.ID.make("gpt-5-nano"))
}).pipe(Effect.provide(Catalog.defaultLayer)),
}).pipe(Effect.provide(Catalog.defaultLayer.pipe(Layer.provide(locationLayer)))),
)
})

View File

@@ -4,7 +4,7 @@ import { ModelV2 } from "@opencode-ai/core/model"
import { PluginV2 } from "@opencode-ai/core/plugin"
import { XAIPlugin } from "@opencode-ai/core/plugin/provider/xai"
import { ProviderV2 } from "@opencode-ai/core/provider"
import { testEffect } from "../../lib/effect"
import { testEffect } from "../lib/effect"
import { fakeSelectorSdk } from "./provider-helper"
const it = testEffect(PluginV2.defaultLayer)

Some files were not shown because too many files have changed in this diff Show More