Compare commits

...

7 Commits

Author SHA1 Message Date
Nikhil Sonti
9371819728 chore: self-review fixes 2026-04-22 17:07:21 -07:00
Nikhil Sonti
f2eef66b9c chore(build-tools): sweep orphaned references to retired disk pipeline 2026-04-22 17:03:26 -07:00
Nikhil Sonti
76dab537d5 docs(build-tools): Lima template dev loop + record D9
Updated the build-tools README in this worktree. Also recorded D9 in the canonical external spec file at /Users/shadowfax/llm/code/browseros-project/grove-ref/browseros-main/specs/decisions.md, which is outside this git checkout.
2026-04-22 17:02:00 -07:00
Nikhil Sonti
20dcab1a7f feat(build): stage Lima template into server resources
Verified local-resource staging with: bun scripts/build/server.ts --target=darwin-arm64 --ci. The template was copied to dist/prod/server/darwin-arm64/resources/vm/browseros-vm.yaml and included in the zip. bun run build:server:test still fails on the pre-existing R2 limactl resource with "The specified key does not exist".
2026-04-22 16:59:47 -07:00
Nikhil Sonti
9805a18535 feat(build-tools): rename VmManifest to AgentManifest, drop disk fields 2026-04-22 16:57:11 -07:00
Nikhil Sonti
ed35e628bc feat(build-tools): remove build-disk pipeline and recipe directory
Verified that Task 2 removed the scripts, recipe directory, workflow, and package scripts. Typecheck remains green here because manifest disk fields are removed in the next task, so the plan's expected missing-import failure does not apply yet.
2026-04-22 16:52:57 -07:00
Nikhil Sonti
d0ce141d8a feat(build-tools): add Lima template for BrowserOS VM 2026-04-22 16:51:23 -07:00
21 changed files with 223 additions and 764 deletions

View File

@@ -16,7 +16,6 @@ on:
pull_request:
paths:
- "packages/browseros-agent/packages/build-tools/**"
- "!packages/browseros-agent/packages/build-tools/scripts/build-disk.ts"
- ".github/workflows/build-agent.yml"
env:

View File

@@ -1,179 +0,0 @@
name: build-vm
on:
workflow_dispatch:
inputs:
version:
description: "VM version (e.g. 2026.04.22)"
required: true
type: string
publish:
description: "Upload to R2 and merge manifest slice"
required: false
default: false
type: boolean
pull_request:
paths:
- "packages/browseros-agent/packages/build-tools/**"
- "!packages/browseros-agent/packages/build-tools/scripts/build-tarball.ts"
- ".github/workflows/build-vm.yml"
env:
BUN_VERSION: "1.3.6"
PKG_DIR: packages/browseros-agent/packages/build-tools
permissions:
contents: read
jobs:
check:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v2
with:
bun-version: ${{ env.BUN_VERSION }}
- working-directory: packages/browseros-agent
run: bun install --frozen-lockfile
- working-directory: packages/browseros-agent
run: bun run --filter @browseros/build-tools typecheck
- working-directory: packages/browseros-agent
run: bun run --filter @browseros/build-tools test
build:
needs: check
if: ${{ github.event_name == 'workflow_dispatch' }}
strategy:
fail-fast: false
matrix:
include:
- arch: arm64
runner: ubuntu-24.04-arm
runs-on: ${{ matrix.runner }}
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v2
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install libguestfs, qemu, and zstd
run: |
sudo apt-get update
sudo apt-get install -y libguestfs-tools qemu-utils zstd
sudo chmod 0644 /boot/vmlinuz-* /boot/initrd.img-* || true
[ -e /dev/kvm ] && sudo chmod 0666 /dev/kvm || true
[ -x /usr/bin/passt ] && sudo mv /usr/bin/passt /usr/bin/passt.disabled || true
echo "LIBGUESTFS_BACKEND=direct" >> "$GITHUB_ENV"
- working-directory: packages/browseros-agent
run: bun install --frozen-lockfile
- name: Validate VM version input
if: ${{ github.event_name == 'workflow_dispatch' }}
working-directory: ${{ env.PKG_DIR }}
env:
VERSION: ${{ inputs.version }}
run: |
set -euo pipefail
bundle_version="$(bun -e "const b = await Bun.file('bundle.json').json(); console.log(b.vmVersion)")"
if [ "$VERSION" != "$bundle_version" ]; then
echo "inputs.version ($VERSION) must match bundle.json vmVersion ($bundle_version)" >&2
exit 1
fi
- name: Build disk
working-directory: ${{ env.PKG_DIR }}
env:
VERSION: ${{ inputs.version || format('pr-{0}', github.event.pull_request.number) }}
OUT: ${{ github.workspace }}/dist/vm
run: bun run build:disk -- --version "$VERSION" --arch "${{ matrix.arch }}" --output-dir "$OUT"
- uses: actions/upload-artifact@v4
with:
name: vm-disk-${{ matrix.arch }}
path: dist/vm/
retention-days: 7
smoke:
needs: build
if: ${{ github.event_name == 'workflow_dispatch' }}
runs-on: ubuntu-24.04-arm
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v2
with:
bun-version: ${{ env.BUN_VERSION }}
- uses: actions/download-artifact@v4
with:
name: vm-disk-arm64
path: dist/vm
- name: Install qemu, zstd, curl, and Lima
run: |
set -euo pipefail
sudo apt-get update
sudo apt-get install -y qemu-system-arm qemu-utils zstd curl
lima_version="$(cat "${PKG_DIR}/recipe/LIMA_VERSION.pin")"
lima_sha256="$(cat "${PKG_DIR}/recipe/LIMA_LINUX_AARCH64_SHA256.pin")"
curl -fsSL -o /tmp/lima.tar.gz \
"https://github.com/lima-vm/lima/releases/download/${lima_version}/lima-${lima_version#v}-Linux-aarch64.tar.gz"
echo "${lima_sha256} /tmp/lima.tar.gz" | sha256sum --check --strict
sudo tar -C /usr/local -xzf /tmp/lima.tar.gz
- working-directory: packages/browseros-agent
run: bun install --frozen-lockfile
- name: Smoke test VM disk
working-directory: ${{ env.PKG_DIR }}
run: |
set -euo pipefail
qcow="$(find "$GITHUB_WORKSPACE/dist/vm" -name 'browseros-vm-*-arm64.qcow2.zst' -print -quit)"
if [ -z "$qcow" ]; then
echo "missing arm64 VM disk artifact" >&2
exit 1
fi
bun run smoke:vm -- --arch arm64 --qcow "$qcow" --limactl /usr/local/bin/limactl
publish:
needs: [build, smoke]
if: ${{ github.event_name == 'workflow_dispatch' && inputs.publish == true }}
runs-on: ubuntu-24.04
environment: release
concurrency:
group: r2-manifest-publish
cancel-in-progress: false
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v2
with:
bun-version: ${{ env.BUN_VERSION }}
- uses: actions/download-artifact@v4
with:
pattern: vm-disk-*
path: dist
merge-multiple: true
- working-directory: packages/browseros-agent
run: bun install --frozen-lockfile
- name: Upload VM disks to R2
working-directory: ${{ env.PKG_DIR }}
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
R2_BUCKET: ${{ secrets.R2_BUCKET }}
run: |
set -euo pipefail
for file in "$GITHUB_WORKSPACE"/dist/*.qcow2.zst; do
base="$(basename "$file")"
bun run upload -- --file "$file" --key "vm/$base" --content-type "application/zstd" --sidecar-sha
done
- name: Merge VM slice into manifest
working-directory: ${{ env.PKG_DIR }}
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
R2_BUCKET: ${{ secrets.R2_BUCKET }}
run: |
set -euo pipefail
mkdir -p dist
cp -R "$GITHUB_WORKSPACE"/dist/* dist/
bun run download -- --key vm/manifest.json --out dist/baseline-manifest.json
bun run emit-manifest -- \
--slice vm \
--dist-dir dist \
--merge-from dist/baseline-manifest.json \
--out dist/manifest.json
bun run upload -- --file dist/manifest.json --key vm/manifest.json --content-type "application/json"

View File

@@ -1,6 +1,8 @@
# @browseros/build-tools
Builds BrowserOS VM disks and agent image tarballs, publishes release artifacts to R2, and hydrates the local VM artifact cache for development.
Builds agent image tarballs, publishes release artifacts to R2, and hydrates the local dev cache for agent tarballs.
The BrowserOS VM is defined by a committed Lima template at `template/browseros-vm.yaml`. There is no custom disk build step; `limactl` consumes the template directly at runtime.
## Setup
@@ -9,13 +11,28 @@ cp packages/build-tools/.env.sample packages/build-tools/.env
bun install
```
## Build a VM disk
## Dev loop against the Lima template
Requires `libguestfs`, `qemu-img`, and `zstd` in an arm64 Linux environment.
On Apple Silicon, run this from an arm64 Lima/Debian VM rather than directly on macOS.
Requires `limactl` on PATH. It is bundled with the server; for bare-worktree use, install Lima with Homebrew.
```bash
bun run --filter @browseros/build-tools build:disk -- --version 2026.04.22 --arch arm64
brew install lima
```
```bash
limactl start \
--name browseros-vm-dev \
packages/browseros-agent/packages/build-tools/template/browseros-vm.yaml
limactl shell browseros-vm-dev podman info
SOCK="$(limactl list browseros-vm-dev --format '{{.Dir}}')/sock/podman.sock"
curl --unix-socket "$SOCK" http://d/v5.0.0/libpod/_ping
bun run --filter @browseros/build-tools build:tarball -- --agent openclaw --arch arm64
limactl shell browseros-vm-dev podman load -i "$(ls dist/images/openclaw-*-arm64.tar.gz | head -1)"
limactl delete --force browseros-vm-dev
```
## Build an agent tarball
@@ -26,12 +43,9 @@ Requires `podman`.
bun run --filter @browseros/build-tools build:tarball -- --agent openclaw --arch arm64
```
## Smoke test artifacts
VM smoke tests require `limactl`, `qemu`, and `zstd`. Agent tarball smoke tests require `podman`.
## Smoke test an agent tarball
```bash
bun run --filter @browseros/build-tools smoke:vm -- --arch arm64 --qcow ./dist/browseros-vm-2026.04.22-arm64.qcow2.zst
bun run --filter @browseros/build-tools smoke:tarball -- --agent openclaw --arch arm64 --tarball ./dist/images/openclaw-2026.4.12-arm64.tar.gz
```
@@ -41,10 +55,9 @@ bun run --filter @browseros/build-tools smoke:tarball -- --agent openclaw --arch
bun run --filter @browseros/build-tools emit-manifest -- --dist-dir packages/build-tools/dist
```
Publish workflows can update only one manifest slice at a time. Sliced publishing requires an existing R2 `vm/manifest.json` baseline; bootstrap first releases with `--slice full`.
Publish workflows can update one agent slice at a time. Sliced publishing requires an existing R2 `vm/manifest.json` baseline; bootstrap first releases with `--slice full`.
```bash
bun run --filter @browseros/build-tools emit-manifest -- --slice vm --merge-from https://cdn.browseros.com/vm/manifest.json
bun run --filter @browseros/build-tools emit-manifest -- --slice agents:openclaw --merge-from https://cdn.browseros.com/vm/manifest.json
```
@@ -54,4 +67,4 @@ bun run --filter @browseros/build-tools emit-manifest -- --slice agents:openclaw
NODE_ENV=development bun run --filter @browseros/build-tools cache:sync
```
Development cache files land under `~/.browseros-dev/cache/vm/`. Production-mode cache files land under `~/.browseros/cache/vm/`.
Development cache files land under `~/.browseros-dev/cache/vm/images/`. Production-mode cache files land under `~/.browseros/cache/vm/images/`.

View File

@@ -1,5 +1,4 @@
{
"vmVersion": "2026.04.22",
"agents": [
{
"name": "openclaw",

View File

@@ -5,14 +5,12 @@
"type": "module",
"description": "BrowserOS release artifact producer and dev cache sync",
"scripts": {
"build:disk": "bun run scripts/build-disk.ts",
"build:tarball": "bun run scripts/build-tarball.ts",
"emit-manifest": "bun run scripts/emit-manifest.ts",
"upload": "bun run scripts/upload-to-r2.ts",
"download": "bun run scripts/download-from-r2.ts",
"cache:sync": "bun run scripts/cache-sync.ts",
"smoke:tarball": "bun run scripts/smoke-tarball.ts",
"smoke:vm": "bun run scripts/smoke-vm.ts",
"test": "bun test",
"typecheck": "tsc --noEmit"
},

View File

@@ -1 +0,0 @@
ef5acb5908f6ef1f7ffcf3a63913cdf618da3229ffa3b04e3727959e36bb9de1

View File

@@ -1 +0,0 @@
e879ce3547728da306bb0e634ee9f8309b8923b75873bf44cac161853b170f2b

View File

@@ -1,12 +0,0 @@
{
"arm64": {
"upstreamVersion": "20260413-2447",
"url": "https://cloud.debian.org/images/cloud/bookworm/20260413-2447/debian-12-genericcloud-arm64-20260413-2447.qcow2",
"sha512": "15ad6c52e255c84eb0e91001c5907b27199d8a7164d8ac172cfe9c92850dfaf606a6c3161d6af7f0fd5a5fef2aa8dcd9a23c2eb0fedbfcddb38e2bc306cba98f"
},
"x64": {
"upstreamVersion": "20260413-2447",
"url": "https://cloud.debian.org/images/cloud/bookworm/20260413-2447/debian-12-genericcloud-amd64-20260413-2447.qcow2",
"sha512": "db11b13c4efcc37828ffadae521d101e85079d349e1418074087bb7d306f11caccdc2b0b539d6fd50d623d40a898f83c6137268a048d7700397dc35b7dcbc927"
}
}

View File

@@ -1,19 +0,0 @@
# BrowserOS VM recipe — Debian 12 (bookworm) genericcloud
# Consumed by scripts/build-disk.ts. One virt-customize primitive per line.
# Ops: run-command | copy-in <src>:<dest-dir> | upload <src>:<dest-file> | write <path>:<content> | truncate <path>
# {version} and {manifest_tmp} are substituted at build time.
run-command apt-get update
run-command DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends podman crun fuse-overlayfs slirp4netns ca-certificates
run-command systemctl enable podman.socket
copy-in auth.json:/etc/containers/
run-command useradd --create-home --uid 1000 --shell /bin/bash browseros
run-command usermod -aG sudo browseros
copy-in sudoers-browseros:/etc/sudoers.d/
run-command chmod 0440 /etc/sudoers.d/sudoers-browseros
write /etc/browseros-vm-version:{version}
upload {manifest_tmp}:/etc/browseros-vm-manifest.json
run-command apt-get clean
run-command rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
run-command rm -f /etc/ssh/ssh_host_*
truncate /etc/machine-id

View File

@@ -1 +0,0 @@
browseros ALL=(ALL) NOPASSWD: ALL

View File

@@ -1,216 +0,0 @@
#!/usr/bin/env bun
import { createHash } from 'node:crypto'
import { createReadStream } from 'node:fs'
import {
copyFile,
mkdir,
readFile,
rm,
stat,
writeFile,
} from 'node:fs/promises'
import path from 'node:path'
import { parseArgs } from 'node:util'
import { $ } from 'bun'
import { type Arch, parseArch } from './common/arch'
import { fetchWithTimeout } from './common/fetch'
import { qcow2Key } from './common/manifest'
import { sha256File } from './common/sha256'
type ChunkSink = ReturnType<ReturnType<typeof Bun.file>['writer']>
const { values } = parseArgs({
args: Bun.argv.slice(2),
options: {
version: { type: 'string' },
arch: { type: 'string' },
'output-dir': { type: 'string', default: './dist' },
},
})
if (!values.version || !values.arch) {
console.error(
'usage: build:disk -- --version <YYYY.MM.DD[-N]> --arch <arm64|x64> [--output-dir ./dist]',
)
process.exit(1)
}
const arch = parseArch(values.arch)
const version = values.version
const outDir = values['output-dir']
const pkgRoot = path.resolve(import.meta.dir, '..')
await mkdir(outDir, { recursive: true })
const baseImages = JSON.parse(
await readFile(path.join(pkgRoot, 'recipe/base-images.json'), 'utf8'),
) as Record<Arch, { upstreamVersion: string; url: string; sha512: string }>
const base = baseImages[arch]
if (!base) throw new Error(`missing base image for arch ${arch}`)
const basePath = path.join(outDir, `base-${arch}.qcow2`)
const workPath = path.join(outDir, `work-${version}-${arch}.qcow2`)
const buildMarkerPath = path.join(outDir, `build-marker-${arch}.json`)
const recipePath = path.join(pkgRoot, 'recipe/browseros-vm.recipe')
const rawOut = path.join(outDir, `browseros-vm-${version}-${arch}.qcow2`)
const zstOut = `${rawOut}.zst`
try {
await download(base.url, basePath)
await verifySha512(basePath, base.sha512)
await copyFile(basePath, workPath)
await writeFile(
buildMarkerPath,
`${JSON.stringify({ name: 'browseros-vm', version, arch, phase: 'build' }, null, 2)}\n`,
)
const recipeText = await readFile(recipePath, 'utf8')
const args = composeVirtCustomizeArgs({
diskPath: workPath,
recipeText,
recipeDir: path.dirname(recipePath),
substitutions: { version, manifest_tmp: buildMarkerPath },
})
await spawnChecked(['virt-customize', ...args])
await $`virt-sparsify --in-place ${workPath}`.quiet()
await $`qemu-img convert -O qcow2 -c ${workPath} ${rawOut}`.quiet()
await $`zstd -19 --long=30 -T0 -f -o ${zstOut} ${rawOut}`.quiet()
const sha = await sha256File(zstOut)
const size = (await stat(zstOut)).size
await writeFile(`${zstOut}.sha256`, `${sha} ${path.basename(zstOut)}\n`)
console.log(
JSON.stringify(
{
key: qcow2Key(version, arch),
path: zstOut,
sha256: sha,
sizeBytes: size,
},
null,
2,
),
)
} finally {
await rm(workPath, { force: true })
await rm(basePath, { force: true })
await rm(rawOut, { force: true })
await rm(buildMarkerPath, { force: true })
}
function composeVirtCustomizeArgs(opts: {
diskPath: string
recipeText: string
recipeDir: string
substitutions: Record<string, string>
}): string[] {
const out = ['-a', opts.diskPath, '--network']
for (const rawLine of opts.recipeText.split('\n')) {
const line = rawLine.trim()
if (!line || line.startsWith('#')) continue
const spaceAt = line.indexOf(' ')
if (spaceAt === -1) throw new Error(`invalid recipe line: ${line}`)
const op = line.slice(0, spaceAt)
const rest = subst(line.slice(spaceAt + 1), opts.substitutions)
if (op === 'run-command') {
out.push('--run-command', rest)
continue
}
if (op === 'copy-in') {
const colonAt = rest.indexOf(':')
if (colonAt === -1) throw new Error(`invalid copy-in line: ${line}`)
const source = rest.slice(0, colonAt)
const target = rest.slice(colonAt + 1)
out.push('--copy-in', `${path.resolve(opts.recipeDir, source)}:${target}`)
continue
}
if (op === 'upload') {
const colonAt = rest.indexOf(':')
if (colonAt === -1) throw new Error(`invalid upload line: ${line}`)
const source = rest.slice(0, colonAt)
const target = rest.slice(colonAt + 1)
out.push('--upload', `${path.resolve(opts.recipeDir, source)}:${target}`)
continue
}
if (op === 'write') {
out.push('--write', rest)
continue
}
if (op === 'truncate') {
out.push('--truncate', rest)
continue
}
throw new Error(`unknown recipe op: ${op}`)
}
return out
}
function subst(value: string, vars: Record<string, string>): string {
return value.replace(/\{(\w+)\}/g, (_match, key: string) => {
const replacement = vars[key]
if (!replacement) throw new Error(`no substitution for {${key}}`)
return replacement
})
}
async function download(url: string, dest: string): Promise<void> {
const response = await fetchWithTimeout(url)
if (!response.ok || !response.body) {
throw new Error(`download failed: ${url} (${response.status})`)
}
const sink = Bun.file(dest).writer()
const reader = response.body.getReader()
try {
await pumpStream(reader, sink)
} finally {
await sink.end()
}
}
async function verifySha512(filePath: string, expected: string): Promise<void> {
const hash = createHash('sha512')
for await (const chunk of createReadStream(filePath)) {
hash.update(chunk)
}
const actual = hash.digest('hex')
if (actual !== expected) {
throw new Error(
`sha512 mismatch for ${filePath}: expected ${expected}, got ${actual}`,
)
}
}
async function spawnChecked(argv: string[]): Promise<void> {
const proc = Bun.spawn(argv, {
stdout: 'inherit',
stderr: 'inherit',
env: {
...process.env,
LIBGUESTFS_BACKEND: process.env.LIBGUESTFS_BACKEND ?? 'direct',
},
})
const code = await proc.exited
if (code !== 0) throw new Error(`${argv[0]} exited ${code}`)
}
async function pumpStream(
reader: ReadableStreamDefaultReader<Uint8Array>,
sink: ChunkSink,
): Promise<void> {
for (;;) {
const { done, value } = await reader.read()
if (done) break
sink.write(value)
}
}

View File

@@ -6,7 +6,7 @@ import { parseArgs } from 'node:util'
import { PATHS } from '@browseros/shared/constants/paths'
import { ARCHES, type Arch } from './common/arch'
import { fetchWithTimeout } from './common/fetch'
import type { Artifact, VmManifest } from './common/manifest'
import type { AgentManifest, Artifact } from './common/manifest'
import { verifySha256 } from './common/sha256'
type ChunkSink = ReturnType<ReturnType<typeof Bun.file>['writer']>
@@ -18,20 +18,13 @@ export interface PlanItem {
}
export function planSync(opts: {
local: VmManifest | null
remote: VmManifest
local: AgentManifest | null
remote: AgentManifest
cacheRoot: string
arches: Arch[]
}): PlanItem[] {
const out: PlanItem[] = []
for (const arch of opts.arches) {
maybeAdd(
out,
opts.remote.vmDisk[arch],
opts.local?.vmDisk[arch],
opts.cacheRoot,
)
for (const [name, agent] of Object.entries(opts.remote.agents)) {
maybeAdd(
out,
@@ -76,20 +69,18 @@ if (import.meta.main) {
`manifest fetch failed: ${manifestUrl} (${response.status})`,
)
}
const remote = (await response.json()) as VmManifest
const remote = (await response.json()) as AgentManifest
const localManifestPath = path.join(cacheRoot, 'vm', 'manifest.json')
const local = await readLocalManifest(localManifestPath)
const plan = planSync({ local, remote, cacheRoot, arches })
if (plan.length === 0) {
console.log(`cache up to date at vmVersion ${remote.vmVersion}`)
console.log('agent cache up to date')
process.exit(0)
}
console.log(
`syncing ${plan.length} artifact(s) for vmVersion ${remote.vmVersion}`,
)
console.log(`syncing ${plan.length} agent artifact(s)`)
for (const item of plan) {
await mkdir(path.dirname(item.destPath), { recursive: true })
const partial = `${item.destPath}.partial`
@@ -128,9 +119,9 @@ function getCacheDir(): string {
export async function readLocalManifest(
manifestPath: string,
): Promise<VmManifest | null> {
): Promise<AgentManifest | null> {
try {
return JSON.parse(await readFile(manifestPath, 'utf8')) as VmManifest
return JSON.parse(await readFile(manifestPath, 'utf8')) as AgentManifest
} catch (error) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') return null
throw error

View File

@@ -12,11 +12,9 @@ export interface AgentEntry {
tarballs: Record<Arch, Artifact>
}
export interface VmManifest {
schemaVersion: 1
vmVersion: string
export interface AgentManifest {
schemaVersion: 2
updatedAt: string
vmDisk: Record<Arch, Artifact>
agents: Record<string, AgentEntry>
}
@@ -27,7 +25,6 @@ export interface BundleAgent {
}
export interface Bundle {
vmVersion: string
agents: BundleAgent[]
}
@@ -37,14 +34,9 @@ export interface ArtifactInput {
}
export interface ArtifactInputs {
vmDisk: Record<Arch, ArtifactInput>
agents: Record<string, Record<Arch, ArtifactInput>>
}
export function qcow2Key(vmVersion: string, arch: Arch): string {
return `vm/browseros-vm-${vmVersion}-${arch}.qcow2.zst`
}
export function tarballKey(name: string, version: string, arch: Arch): string {
return `vm/images/${name}-${version}-${arch}.tar.gz`
}
@@ -53,18 +45,7 @@ export function buildManifest(
bundle: Bundle,
inputs: ArtifactInputs,
now: Date = new Date(),
): VmManifest {
const vmDisk = {} as Record<Arch, Artifact>
for (const arch of ARCHES) {
const entry = inputs.vmDisk[arch]
if (!entry) throw new Error(`missing vmDisk inputs for arch ${arch}`)
vmDisk[arch] = {
key: qcow2Key(bundle.vmVersion, arch),
sha256: entry.sha256,
sizeBytes: entry.sizeBytes,
}
}
): AgentManifest {
const agents: Record<string, AgentEntry> = {}
for (const agent of bundle.agents) {
const tarballs = {} as Record<Arch, Artifact>
@@ -87,10 +68,8 @@ export function buildManifest(
}
return {
schemaVersion: 1,
vmVersion: bundle.vmVersion,
schemaVersion: 2,
updatedAt: now.toISOString(),
vmDisk,
agents,
}
}

View File

@@ -2,18 +2,16 @@
import { mkdir, readFile, stat, writeFile } from 'node:fs/promises'
import path from 'node:path'
import { parseArgs } from 'node:util'
import { ARCHES, type Arch } from './common/arch'
import { ARCHES } from './common/arch'
import { fetchWithTimeout } from './common/fetch'
import {
type AgentEntry,
type Artifact,
type AgentManifest,
type ArtifactInputs,
type Bundle,
type BundleAgent,
buildManifest,
qcow2Key,
tarballKey,
type VmManifest,
} from './common/manifest'
import { sha256File } from './common/sha256'
@@ -34,6 +32,10 @@ const bundle = JSON.parse(
await readFile(path.join(pkgRoot, 'bundle.json'), 'utf8'),
) as Bundle
if (slice !== 'full' && !slice.startsWith('agents:')) {
throw new Error(`unknown slice: ${slice}`)
}
const baseline = values['merge-from']
? await loadBaseline(values['merge-from'])
: null
@@ -51,8 +53,8 @@ async function buildSlicedManifest(opts: {
bundle: Bundle
distDir: string
slice: string
baseline: VmManifest | null
}): Promise<VmManifest> {
baseline: AgentManifest | null
}): Promise<AgentManifest> {
if (opts.slice === 'full') {
return buildManifest(
opts.bundle,
@@ -64,16 +66,6 @@ async function buildSlicedManifest(opts: {
if (!baseline) throw new Error(`--slice ${opts.slice} requires --merge-from`)
const updatedAt = new Date().toISOString()
if (opts.slice === 'vm') {
return {
...baseline,
schemaVersion: 1,
vmVersion: opts.bundle.vmVersion,
updatedAt,
vmDisk: await readVmDisk(opts.bundle.vmVersion, opts.distDir),
}
}
if (opts.slice.startsWith('agents:')) {
const name = opts.slice.slice('agents:'.length)
const agent = opts.bundle.agents.find((entry) => entry.name === name)
@@ -81,6 +73,7 @@ async function buildSlicedManifest(opts: {
return {
...baseline,
schemaVersion: 2,
updatedAt,
agents: {
...baseline.agents,
@@ -110,26 +103,10 @@ async function readAllInputs(
}
return {
vmDisk: await readArtifactInputs((arch) =>
path.join(distDir, path.basename(qcow2Key(bundle.vmVersion, arch))),
),
agents,
}
}
async function readVmDisk(
vmVersion: string,
distDir: string,
): Promise<Record<Arch, Artifact>> {
const vmDisk = {} as Record<Arch, Artifact>
for (const arch of ARCHES) {
const key = qcow2Key(vmVersion, arch)
const artifactPath = path.join(distDir, path.basename(key))
vmDisk[arch] = { key, ...(await readArtifactInput(artifactPath)) }
}
return vmDisk
}
async function readAgentEntry(
agent: BundleAgent,
distDir: string,
@@ -143,16 +120,6 @@ async function readAgentEntry(
return { image: agent.image, version: agent.version, tarballs }
}
async function readArtifactInputs(
pathForArch: (arch: Arch) => string,
): Promise<Record<Arch, { sha256: string; sizeBytes: number }>> {
const out = {} as Record<Arch, { sha256: string; sizeBytes: number }>
for (const arch of ARCHES) {
out[arch] = await readArtifactInput(pathForArch(arch))
}
return out
}
async function readArtifactInput(
filePath: string,
): Promise<{ sha256: string; sizeBytes: number }> {
@@ -162,14 +129,14 @@ async function readArtifactInput(
}
}
async function loadBaseline(src: string): Promise<VmManifest> {
async function loadBaseline(src: string): Promise<AgentManifest> {
if (src.startsWith('http://') || src.startsWith('https://')) {
const response = await fetchWithTimeout(src)
if (!response.ok) {
throw new Error(`baseline fetch failed: ${src} (${response.status})`)
}
return (await response.json()) as VmManifest
return (await response.json()) as AgentManifest
}
return JSON.parse(await readFile(src, 'utf8')) as VmManifest
return JSON.parse(await readFile(src, 'utf8')) as AgentManifest
}

View File

@@ -1,105 +0,0 @@
#!/usr/bin/env bun
import { mkdtemp, rm, writeFile } from 'node:fs/promises'
import { tmpdir } from 'node:os'
import path from 'node:path'
import { parseArgs } from 'node:util'
import { $ } from 'bun'
import { type Arch, parseArch } from './common/arch'
const INSTANCE_NAME = 'browseros-vm-smoke'
const SOCKET_POLL_INTERVAL_MS = 2000
const SOCKET_POLL_TIMEOUT_MS = 120_000
type BunRequestInit = RequestInit & { unix?: string }
const { values } = parseArgs({
args: Bun.argv.slice(2),
options: {
qcow: { type: 'string' },
arch: { type: 'string', default: 'x64' },
limactl: { type: 'string', default: 'limactl' },
},
})
if (!values.qcow) {
console.error(
'usage: smoke:vm -- --qcow <path.qcow2.zst> [--arch arm64|x64] [--limactl limactl]',
)
process.exit(1)
}
const arch = parseArch(values.arch ?? 'x64')
await bootAndProbe(values.qcow, arch, values.limactl ?? 'limactl')
console.log('vm smoke test passed')
async function bootAndProbe(
qcowZstPath: string,
arch: Arch,
limactl: string,
): Promise<void> {
const workDir = await mkdtemp(path.join(tmpdir(), 'browseros-vm-smoke-'))
const qcowPath = path.join(workDir, 'disk.qcow2')
const configPath = path.join(workDir, 'lima.yaml')
const sockPath = path.join(workDir, 'podman.sock')
try {
await $`zstd -d -f -o ${qcowPath} ${qcowZstPath}`.quiet()
await writeFile(configPath, composeLimaConfig(qcowPath, arch, sockPath))
await $`${limactl} start --name=${INSTANCE_NAME} --tty=false ${configPath}`
await waitForSocket(sockPath)
await probePodmanSocket(sockPath)
} finally {
await $`${limactl} stop --force ${INSTANCE_NAME}`.quiet().nothrow()
await $`${limactl} delete --force ${INSTANCE_NAME}`.quiet().nothrow()
await rm(workDir, { recursive: true, force: true })
}
}
function composeLimaConfig(
qcowPath: string,
arch: Arch,
sockPath: string,
): string {
return `vmType: qemu
images:
- location: ${qcowPath}
arch: ${limaArch(arch)}
containerd:
system: false
user: false
mounts: []
provision: []
portForwards:
- guestSocket: /run/podman/podman.sock
hostSocket: ${sockPath}
proto: unix
`
}
function limaArch(arch: Arch): 'aarch64' | 'x86_64' {
return arch === 'arm64' ? 'aarch64' : 'x86_64'
}
async function waitForSocket(sockPath: string): Promise<void> {
const deadline = Date.now() + SOCKET_POLL_TIMEOUT_MS
while (Date.now() < deadline) {
if (await Bun.file(sockPath).exists()) return
await Bun.sleep(SOCKET_POLL_INTERVAL_MS)
}
throw new Error(
`podman socket did not appear within ${SOCKET_POLL_TIMEOUT_MS}ms: ${sockPath}`,
)
}
async function probePodmanSocket(sockPath: string): Promise<void> {
const init: BunRequestInit = { unix: sockPath }
const response = await fetch('http://d/v4.0.0/libpod/_ping', init)
if (!response.ok) {
throw new Error(`podman ping failed: ${response.status}`)
}
const body = (await response.text()).trim()
if (body !== 'OK') {
throw new Error(`podman ping body unexpected: ${body}`)
}
}

View File

@@ -0,0 +1,80 @@
# BrowserOS VM -- consumed directly by limactl, no build step.
# Based on Lima's built-in podman.yaml + _images/debian-12 templates.
# https://github.com/lima-vm/lima/tree/master/templates
minimumLimaVersion: 2.0.0
vmType: vz
cpus: 2
memory: 2GiB
disk: 10GiB
# Pinned Debian 12 genericcloud -- matches the pin from the retired disk build pipeline.
# Bump in lockstep with upstream when provisioning changes.
images:
- location: "https://cloud.debian.org/images/cloud/bookworm/20260413-2447/debian-12-genericcloud-arm64-20260413-2447.qcow2"
arch: aarch64
digest: "sha512:15ad6c52e255c84eb0e91001c5907b27199d8a7164d8ac172cfe9c92850dfaf606a6c3161d6af7f0fd5a5fef2aa8dcd9a23c2eb0fedbfcddb38e2bc306cba98f"
- location: "https://cloud.debian.org/images/cloud/bookworm/20260413-2447/debian-12-genericcloud-amd64-20260413-2447.qcow2"
arch: x86_64
digest: "sha512:db11b13c4efcc37828ffadae521d101e85079d349e1418074087bb7d306f11caccdc2b0b539d6fd50d623d40a898f83c6137268a048d7700397dc35b7dcbc927"
# Fallbacks for when Debian rotates the dated snapshot.
- location: https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-arm64.qcow2
arch: aarch64
- location: https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-amd64.qcow2
arch: x86_64
# Host-state isolation -- matches spec D5 / D7 defaults.
mounts: []
# We run podman, not containerd.
containerd:
system: false
user: false
provision:
- mode: system
script: |
#!/bin/bash
set -eux -o pipefail
if [ -e /etc/browseros-vm-provisioned ]; then exit 0; fi
DEBIAN_FRONTEND=noninteractive apt-get update
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
podman crun fuse-overlayfs slirp4netns ca-certificates
systemctl enable --now podman.socket
# Keep Docker config out of the image pull path (spec D7).
mkdir -p /etc/containers
containers_auth=/etc/containers/auth
printf '{}\n' > "${containers_auth}.json"
# Single-tenant appliance user (spec D7).
id browseros >/dev/null 2>&1 || useradd --create-home --uid 1000 --shell /bin/bash browseros
usermod -aG sudo browseros
# Version marker consumed by the runtime (WS4).
printf 'provisioned:%s\n' "$(date -u +%Y-%m-%dT%H:%M:%SZ)" > /etc/browseros-vm-version
apt-get clean
rm -rf /var/lib/apt/lists/*
touch /etc/browseros-vm-provisioned
# Block `limactl start` until podman is ready.
probes:
- script: |
#!/bin/bash
set -eux -o pipefail
if ! timeout 60s bash -c 'until systemctl is-active podman.socket >/dev/null; do sleep 2; done'; then
echo >&2 "podman.socket not active after 60s"
exit 1
fi
hint: See /var/log/cloud-init-output.log inside the guest
# Expose guest podman socket to host for the TypeScript runtime to dial.
portForwards:
- guestSocket: "/run/podman/podman.sock"
hostSocket: "{{.Dir}}/sock/podman.sock"

View File

@@ -8,7 +8,7 @@ import {
readLocalManifest,
selectSyncArches,
} from '../scripts/cache-sync'
import type { VmManifest } from '../scripts/common/manifest'
import type { AgentManifest } from '../scripts/common/manifest'
import { sha256File } from '../scripts/common/sha256'
const openclaw = {
@@ -16,45 +16,53 @@ const openclaw = {
version: '2026.4.12',
}
function manifest(
vmVersion: string,
diskSha: string,
tarSha: string,
): VmManifest {
return {
schemaVersion: 1,
vmVersion,
updatedAt: '2026-04-22T00:00:00.000Z',
vmDisk: {
arm64: {
key: `vm/browseros-vm-${vmVersion}-arm64.qcow2.zst`,
sha256: `${diskSha}-arm64`,
sizeBytes: 101,
},
x64: {
key: `vm/browseros-vm-${vmVersion}-x64.qcow2.zst`,
sha256: `${diskSha}-x64`,
sizeBytes: 102,
},
},
agents: {
openclaw: {
...openclaw,
tarballs: {
arm64: {
key: 'vm/images/openclaw-2026.4.12-arm64.tar.gz',
sha256: `${tarSha}-arm64`,
sizeBytes: 201,
},
x64: {
key: 'vm/images/openclaw-2026.4.12-x64.tar.gz',
sha256: `${tarSha}-x64`,
sizeBytes: 202,
},
const claudeCode = {
image: 'ghcr.io/anthropics/claude-code',
version: '2026.4.10',
}
function manifest(tarSha: string, includeSecondAgent = false): AgentManifest {
const agents: AgentManifest['agents'] = {
openclaw: {
...openclaw,
tarballs: {
arm64: {
key: 'vm/images/openclaw-2026.4.12-arm64.tar.gz',
sha256: `${tarSha}-arm64`,
sizeBytes: 201,
},
x64: {
key: 'vm/images/openclaw-2026.4.12-x64.tar.gz',
sha256: `${tarSha}-x64`,
sizeBytes: 202,
},
},
},
}
if (includeSecondAgent) {
agents['claude-code'] = {
...claudeCode,
tarballs: {
arm64: {
key: 'vm/images/claude-code-2026.4.10-arm64.tar.gz',
sha256: `${tarSha}-claude-arm64`,
sizeBytes: 301,
},
x64: {
key: 'vm/images/claude-code-2026.4.10-x64.tar.gz',
sha256: `${tarSha}-claude-x64`,
sizeBytes: 302,
},
},
}
}
return {
schemaVersion: 2,
updatedAt: '2026-04-22T00:00:00.000Z',
agents,
}
}
function keys(plan: PlanItem[]): string[] {
@@ -62,36 +70,33 @@ function keys(plan: PlanItem[]): string[] {
}
describe('planSync', () => {
it('downloads every selected-arch artifact for a fresh cache', () => {
const remote = manifest('2026.04.22', 'd1', 't1')
it('downloads every selected-arch agent artifact for a fresh cache', () => {
const remote = manifest('t1')
expect(
keys(planSync({ local: null, remote, cacheRoot: '/c', arches: ['x64'] })),
).toEqual([
'vm/browseros-vm-2026.04.22-x64.qcow2.zst',
'vm/images/openclaw-2026.4.12-x64.tar.gz',
])
).toEqual(['vm/images/openclaw-2026.4.12-x64.tar.gz'])
})
it('does nothing when the local manifest matches the remote manifest', () => {
const remote = manifest('2026.04.22', 'd1', 't1')
const remote = manifest('t1')
expect(
planSync({ local: remote, remote, cacheRoot: '/c', arches: ['x64'] }),
).toEqual([])
})
it('downloads only artifacts whose sha256 changed', () => {
const local = manifest('2026.04.20', 'd-old', 't1')
const remote = manifest('2026.04.22', 'd-new', 't1')
it('downloads only agent artifacts whose sha256 changed', () => {
const local = manifest('old-tar')
const remote = manifest('new-tar')
expect(
keys(planSync({ local, remote, cacheRoot: '/c', arches: ['x64'] })),
).toEqual(['vm/browseros-vm-2026.04.22-x64.qcow2.zst'])
).toEqual(['vm/images/openclaw-2026.4.12-x64.tar.gz'])
})
it('supports syncing all release arches', () => {
const remote = manifest('2026.04.22', 'd1', 't1')
const remote = manifest('t1')
expect(
planSync({
@@ -100,7 +105,7 @@ describe('planSync', () => {
cacheRoot: '/c',
arches: ['arm64', 'x64'],
}),
).toHaveLength(4)
).toHaveLength(2)
})
it('selects host arch by default and both arches when requested', () => {
@@ -144,43 +149,31 @@ describe('emit-manifest', () => {
dir = null
})
it('merges a vm slice while preserving agents from the baseline', async () => {
it('rejects the retired vm slice', async () => {
dir = await mkdtemp(path.join(tmpdir(), 'browseros-emit-vm-'))
const distDir = path.join(dir, 'dist')
await writeVmFiles(distDir)
const baseline = manifest('2026.04.20', 'old-disk', 'old-tar')
const baselinePath = path.join(dir, 'baseline.json')
const outPath = path.join(dir, 'manifest.json')
await writeJson(baselinePath, baseline)
await runEmitManifest([
'--slice',
'vm',
'--dist-dir',
distDir,
'--merge-from',
baselinePath,
'--out',
outPath,
])
const merged = JSON.parse(await readFile(outPath, 'utf8')) as VmManifest
expect(merged.vmVersion).toBe('2026.04.22')
expect(merged.agents).toEqual(baseline.agents)
expect(merged.vmDisk.x64.sha256).toBe(
await sha256File(
path.join(distDir, 'browseros-vm-2026.04.22-x64.qcow2.zst'),
),
const result = await runEmitManifest(
[
'--slice',
'vm',
'--dist-dir',
path.join(dir, 'dist'),
'--out',
path.join(dir, 'manifest.json'),
],
false,
)
expect(result.code).toBe(1)
expect(result.stderr).toContain('unknown slice: vm')
})
it('merges an agent slice while preserving vmDisk from the baseline', async () => {
it('merges an agent slice while preserving other agents from the baseline', async () => {
dir = await mkdtemp(path.join(tmpdir(), 'browseros-emit-agent-'))
const distDir = path.join(dir, 'dist')
await writeAgentFiles(distDir)
const baseline = manifest('2026.04.20', 'old-disk', 'old-tar')
const baseline = manifest('old-tar', true)
const baselinePath = path.join(dir, 'baseline.json')
const outPath = path.join(dir, 'manifest.json')
await writeJson(baselinePath, baseline)
@@ -196,9 +189,9 @@ describe('emit-manifest', () => {
outPath,
])
const merged = JSON.parse(await readFile(outPath, 'utf8')) as VmManifest
expect(merged.vmVersion).toBe('2026.04.20')
expect(merged.vmDisk).toEqual(baseline.vmDisk)
const merged = JSON.parse(await readFile(outPath, 'utf8')) as AgentManifest
expect(merged.schemaVersion).toBe(2)
expect(merged.agents['claude-code']).toEqual(baseline.agents['claude-code'])
expect(merged.agents.openclaw.tarballs.arm64.sha256).toBe(
await sha256File(
path.join(distDir, 'images/openclaw-2026.4.12-arm64.tar.gz'),
@@ -208,15 +201,13 @@ describe('emit-manifest', () => {
it('fails slice emission without a merge baseline', async () => {
dir = await mkdtemp(path.join(tmpdir(), 'browseros-emit-fail-'))
const distDir = path.join(dir, 'dist')
await writeVmFiles(distDir)
const result = await runEmitManifest(
[
'--slice',
'vm',
'agents:openclaw',
'--dist-dir',
distDir,
path.join(dir, 'dist'),
'--out',
path.join(dir, 'out.json'),
],
@@ -224,22 +215,12 @@ describe('emit-manifest', () => {
)
expect(result.code).toBe(1)
expect(result.stderr).toContain('--slice vm requires --merge-from')
expect(result.stderr).toContain(
'--slice agents:openclaw requires --merge-from',
)
})
})
async function writeVmFiles(distDir: string): Promise<void> {
await mkdir(distDir, { recursive: true })
await writeFile(
path.join(distDir, 'browseros-vm-2026.04.22-arm64.qcow2.zst'),
'arm disk',
)
await writeFile(
path.join(distDir, 'browseros-vm-2026.04.22-x64.qcow2.zst'),
'x64 disk',
)
}
async function writeAgentFiles(distDir: string): Promise<void> {
await mkdir(path.join(distDir, 'images'), { recursive: true })
await writeFile(

View File

@@ -6,13 +6,11 @@ import {
type ArtifactInputs,
type Bundle,
buildManifest,
qcow2Key,
tarballKey,
} from '../scripts/common/manifest'
import { verifySha256 } from '../scripts/common/sha256'
const bundle: Bundle = {
vmVersion: '2026.04.22',
agents: [
{
name: 'openclaw',
@@ -23,10 +21,6 @@ const bundle: Bundle = {
}
const inputs: ArtifactInputs = {
vmDisk: {
arm64: { sha256: 'disk-arm', sizeBytes: 11 },
x64: { sha256: 'disk-x64', sizeBytes: 12 },
},
agents: {
openclaw: {
arm64: { sha256: 'tar-arm', sizeBytes: 21 },
@@ -37,32 +31,24 @@ const inputs: ArtifactInputs = {
describe('manifest helpers', () => {
it('builds release artifact keys', () => {
expect(qcow2Key('2026.04.22', 'arm64')).toBe(
'vm/browseros-vm-2026.04.22-arm64.qcow2.zst',
)
expect(tarballKey('openclaw', '2026.4.12', 'x64')).toBe(
'vm/images/openclaw-2026.4.12-x64.tar.gz',
)
})
it('builds a manifest from bundle metadata and artifact inputs', () => {
it('builds an agents-only manifest from bundle metadata and artifact inputs', () => {
const manifest = buildManifest(
bundle,
inputs,
new Date('2026-04-22T00:00:00.000Z'),
)
for (const field of ['vm' + 'Version', 'vm' + 'Disk']) {
expect(Object.hasOwn(manifest, field)).toBe(false)
}
expect(manifest).toMatchObject({
schemaVersion: 1,
vmVersion: '2026.04.22',
schemaVersion: 2,
updatedAt: '2026-04-22T00:00:00.000Z',
vmDisk: {
arm64: {
key: 'vm/browseros-vm-2026.04.22-arm64.qcow2.zst',
sha256: 'disk-arm',
sizeBytes: 11,
},
},
agents: {
openclaw: {
image: 'ghcr.io/openclaw/openclaw',
@@ -79,17 +65,9 @@ describe('manifest helpers', () => {
})
})
it('fails when required artifact inputs are missing', () => {
it('fails when required tarball inputs are missing', () => {
expect(() =>
buildManifest(bundle, {
vmDisk: { arm64: inputs.vmDisk.arm64 } as ArtifactInputs['vmDisk'],
agents: inputs.agents,
}),
).toThrow('missing vmDisk inputs for arch x64')
expect(() =>
buildManifest(bundle, {
vmDisk: inputs.vmDisk,
agents: { openclaw: { arm64: inputs.agents.openclaw.arm64 } },
} as unknown as ArtifactInputs),
).toThrow('missing tarball inputs for openclaw/x64')

View File

@@ -21,6 +21,16 @@
"os": ["macos"],
"arch": ["x64"],
"executable": true
},
{
"name": "BrowserOS VM Lima template",
"source": {
"type": "local",
"path": "packages/build-tools/template/browseros-vm.yaml"
},
"destination": "resources/vm/browseros-vm.yaml",
"os": ["macos"],
"arch": ["arm64", "x64"]
}
]
}