Compare commits

..

14 Commits

Author SHA1 Message Date
Nikhil Sonti
a68b31234b fix: limit VM PR checks to build-tools validation 2026-04-22 16:14:03 -07:00
Nikhil Sonti
1c6163149e fix: avoid guest DNS for VM package install 2026-04-22 16:07:46 -07:00
Nikhil Sonti
178cd95839 fix: set VM build DNS in apt command 2026-04-22 15:58:51 -07:00
Nikhil Sonti
3a8a0f8a7f fix: keep arm64 VM recipe simple 2026-04-22 15:52:33 -07:00
Nikhil Sonti
e590b313fe fix: prioritize arm64 build workflows 2026-04-22 15:47:20 -07:00
Nikhil Sonti
6a03275b2a fix: stabilize VM build DNS in CI 2026-04-22 15:43:34 -07:00
Nikhil Sonti
d54a2f3980 fix: address review feedback for PR #785 2026-04-22 15:37:58 -07:00
Nikhil Sonti
0279c0708b chore: remove legacy container packages + workflows 2026-04-22 15:09:16 -07:00
Nikhil Sonti
695c775b87 ci(build-tools): independent build-vm + build-agent workflows 2026-04-22 15:08:11 -07:00
Nikhil Sonti
aedb6634e5 feat(build-tools): emit-manifest + cache:sync 2026-04-22 15:05:28 -07:00
Nikhil Sonti
67153772a0 feat(build-tools): build-tarball script 2026-04-22 15:01:40 -07:00
Nikhil Sonti
29f8cc718f feat(build-tools): build-disk script with virt-customize + zstd 2026-04-22 15:00:52 -07:00
Nikhil Sonti
5adf119c3b feat(build-tools): manifest types + R2 helper 2026-04-22 14:59:30 -07:00
Nikhil Sonti
ab82f4576a feat(build-tools): scaffold package + cache dir helpers 2026-04-22 14:57:21 -07:00
23 changed files with 765 additions and 326 deletions

View File

@@ -16,6 +16,7 @@ on:
pull_request:
paths:
- "packages/browseros-agent/packages/build-tools/**"
- "!packages/browseros-agent/packages/build-tools/scripts/build-disk.ts"
- ".github/workflows/build-agent.yml"
env:

179
.github/workflows/build-vm.yml vendored Normal file
View File

@@ -0,0 +1,179 @@
# Builds, smoke-tests, and optionally publishes the BrowserOS VM disk image.
# PR pushes run only the `check` job; disk build/smoke/publish are manual.
name: build-vm

on:
  workflow_dispatch:
    inputs:
      version:
        description: "VM version (e.g. 2026.04.22)"
        required: true
        type: string
      publish:
        description: "Upload to R2 and merge manifest slice"
        required: false
        default: false
        type: boolean
  pull_request:
    paths:
      - "packages/browseros-agent/packages/build-tools/**"
      - "!packages/browseros-agent/packages/build-tools/scripts/build-tarball.ts"
      - ".github/workflows/build-vm.yml"

env:
  BUN_VERSION: "1.3.6"
  PKG_DIR: packages/browseros-agent/packages/build-tools

permissions:
  contents: read

jobs:
  # Typecheck + unit tests — the only job that runs for pull requests.
  check:
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - working-directory: packages/browseros-agent
        run: bun install --frozen-lockfile
      - working-directory: packages/browseros-agent
        run: bun run --filter @browseros/build-tools typecheck
      - working-directory: packages/browseros-agent
        run: bun run --filter @browseros/build-tools test

  # Builds the qcow2 disk with virt-customize. Manual dispatch only.
  build:
    needs: check
    if: ${{ github.event_name == 'workflow_dispatch' }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - arch: arm64
            runner: ubuntu-24.04-arm
    runs-on: ${{ matrix.runner }}
    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Install libguestfs, qemu, and zstd
        run: |
          sudo apt-get update
          sudo apt-get install -y libguestfs-tools qemu-utils zstd
          # libguestfs builds its appliance from the host kernel/initrd,
          # which must be world-readable on Ubuntu runners.
          sudo chmod 0644 /boot/vmlinuz-* /boot/initrd.img-* || true
          [ -e /dev/kvm ] && sudo chmod 0666 /dev/kvm || true
          # NOTE(review): passt is moved aside, presumably because it breaks
          # the libguestfs appliance network on these runners — confirm.
          [ -x /usr/bin/passt ] && sudo mv /usr/bin/passt /usr/bin/passt.disabled || true
          echo "LIBGUESTFS_BACKEND=direct" >> "$GITHUB_ENV"
      - working-directory: packages/browseros-agent
        run: bun install --frozen-lockfile
      # This job is gated to workflow_dispatch above, so inputs.version is
      # always present; no step-level event guard is needed.
      - name: Validate VM version input
        working-directory: ${{ env.PKG_DIR }}
        env:
          VERSION: ${{ inputs.version }}
        run: |
          set -euo pipefail
          bundle_version="$(bun -e "const b = await Bun.file('bundle.json').json(); console.log(b.vmVersion)")"
          if [ "$VERSION" != "$bundle_version" ]; then
            echo "inputs.version ($VERSION) must match bundle.json vmVersion ($bundle_version)" >&2
            exit 1
          fi
      - name: Build disk
        working-directory: ${{ env.PKG_DIR }}
        env:
          # Only reachable via workflow_dispatch, so no PR-number fallback.
          VERSION: ${{ inputs.version }}
          OUT: ${{ github.workspace }}/dist/vm
        run: bun run build:disk -- --version "$VERSION" --arch "${{ matrix.arch }}" --output-dir "$OUT"
      - uses: actions/upload-artifact@v4
        with:
          name: vm-disk-${{ matrix.arch }}
          path: dist/vm/
          retention-days: 7

  # Boots the freshly built arm64 disk under Lima/QEMU and pings podman.
  smoke:
    needs: build
    if: ${{ github.event_name == 'workflow_dispatch' }}
    runs-on: ubuntu-24.04-arm
    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - uses: actions/download-artifact@v4
        with:
          name: vm-disk-arm64
          path: dist/vm
      - name: Install qemu, zstd, curl, and Lima
        run: |
          set -euo pipefail
          sudo apt-get update
          sudo apt-get install -y qemu-system-arm qemu-utils zstd curl
          # Lima is pinned and checksum-verified before extraction.
          lima_version="$(cat "${PKG_DIR}/recipe/LIMA_VERSION.pin")"
          lima_sha256="$(cat "${PKG_DIR}/recipe/LIMA_LINUX_AARCH64_SHA256.pin")"
          curl -fsSL -o /tmp/lima.tar.gz \
            "https://github.com/lima-vm/lima/releases/download/${lima_version}/lima-${lima_version#v}-Linux-aarch64.tar.gz"
          # GNU sha256sum --check requires TWO spaces (hash<SP><SP>filename);
          # a single space is rejected as an improperly formatted line.
          echo "${lima_sha256}  /tmp/lima.tar.gz" | sha256sum --check --strict
          sudo tar -C /usr/local -xzf /tmp/lima.tar.gz
      - working-directory: packages/browseros-agent
        run: bun install --frozen-lockfile
      - name: Smoke test VM disk
        working-directory: ${{ env.PKG_DIR }}
        run: |
          set -euo pipefail
          qcow="$(find "$GITHUB_WORKSPACE/dist/vm" -name 'browseros-vm-*-arm64.qcow2.zst' -print -quit)"
          if [ -z "$qcow" ]; then
            echo "missing arm64 VM disk artifact" >&2
            exit 1
          fi
          bun run smoke:vm -- --arch arm64 --qcow "$qcow" --limactl /usr/local/bin/limactl

  # Uploads disks to R2 and merges the vm slice into the shared manifest.
  # Opt-in via the `publish` input; gated behind the release environment.
  publish:
    needs: [build, smoke]
    if: ${{ github.event_name == 'workflow_dispatch' && inputs.publish == true }}
    runs-on: ubuntu-24.04
    environment: release
    # Serialize manifest read-modify-write across workflows to avoid lost updates.
    concurrency:
      group: r2-manifest-publish
      cancel-in-progress: false
    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - uses: actions/download-artifact@v4
        with:
          pattern: vm-disk-*
          path: dist
          merge-multiple: true
      - working-directory: packages/browseros-agent
        run: bun install --frozen-lockfile
      - name: Upload VM disks to R2
        working-directory: ${{ env.PKG_DIR }}
        env:
          R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
          R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
          R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
          R2_BUCKET: ${{ secrets.R2_BUCKET }}
        run: |
          set -euo pipefail
          for file in "$GITHUB_WORKSPACE"/dist/*.qcow2.zst; do
            base="$(basename "$file")"
            bun run upload -- --file "$file" --key "vm/$base" --content-type "application/zstd" --sidecar-sha
          done
      - name: Merge VM slice into manifest
        working-directory: ${{ env.PKG_DIR }}
        env:
          R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
          R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
          R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
          R2_BUCKET: ${{ secrets.R2_BUCKET }}
        run: |
          set -euo pipefail
          mkdir -p dist
          cp -R "$GITHUB_WORKSPACE"/dist/* dist/
          bun run download -- --key vm/manifest.json --out dist/baseline-manifest.json
          bun run emit-manifest -- \
            --slice vm \
            --dist-dir dist \
            --merge-from dist/baseline-manifest.json \
            --out dist/manifest.json
          bun run upload -- --file dist/manifest.json --key vm/manifest.json --content-type "application/json"

View File

@@ -2,7 +2,7 @@
R2_ACCOUNT_ID=
R2_ACCESS_KEY_ID=
R2_SECRET_ACCESS_KEY=
R2_BUCKET=browseros
R2_BUCKET=browseros-artifacts
# Public CDN base - used by cache:sync to GET manifest and artifacts
R2_PUBLIC_BASE_URL=https://cdn.browseros.com

View File

@@ -1,8 +1,6 @@
# @browseros/build-tools
Builds agent image tarballs, publishes release artifacts to R2, and hydrates the local dev cache for agent tarballs.
The BrowserOS VM is defined by a committed Lima template at `template/browseros-vm.yaml`. There is no custom disk build step; `limactl` consumes the template directly at runtime.
Builds BrowserOS VM disks and agent image tarballs, publishes release artifacts to R2, and hydrates the local VM artifact cache for development.
## Setup
@@ -11,28 +9,13 @@ cp packages/build-tools/.env.sample packages/build-tools/.env
bun install
```
## Dev loop against the Lima template
## Build a VM disk
Requires `limactl` on PATH. It is bundled with the server; for bare-worktree use, install Lima with Homebrew.
Requires `libguestfs`, `qemu-img`, and `zstd` in an arm64 Linux environment.
On Apple Silicon, run this from an arm64 Lima/Debian VM rather than directly on macOS.
```bash
brew install lima
```
```bash
limactl start \
--name browseros-vm-dev \
packages/browseros-agent/packages/build-tools/template/browseros-vm.yaml
limactl shell browseros-vm-dev podman info
SOCK="$(limactl list browseros-vm-dev --format '{{.Dir}}')/sock/podman.sock"
curl --unix-socket "$SOCK" http://d/v5.0.0/libpod/_ping
bun run --filter @browseros/build-tools build:tarball -- --agent openclaw --arch arm64
limactl shell browseros-vm-dev podman load -i "$(ls dist/images/openclaw-*-arm64.tar.gz | head -1)"
limactl delete --force browseros-vm-dev
bun run --filter @browseros/build-tools build:disk -- --version 2026.04.22 --arch arm64
```
## Build an agent tarball
@@ -43,9 +26,12 @@ Requires `podman`.
bun run --filter @browseros/build-tools build:tarball -- --agent openclaw --arch arm64
```
## Smoke test an agent tarball
## Smoke test artifacts
VM smoke tests require `limactl`, `qemu`, and `zstd`. Agent tarball smoke tests require `podman`.
```bash
bun run --filter @browseros/build-tools smoke:vm -- --arch arm64 --qcow ./dist/browseros-vm-2026.04.22-arm64.qcow2.zst
bun run --filter @browseros/build-tools smoke:tarball -- --agent openclaw --arch arm64 --tarball ./dist/images/openclaw-2026.4.12-arm64.tar.gz
```
@@ -55,9 +41,10 @@ bun run --filter @browseros/build-tools smoke:tarball -- --agent openclaw --arch
bun run --filter @browseros/build-tools emit-manifest -- --dist-dir packages/build-tools/dist
```
Publish workflows can update one agent slice at a time. Sliced publishing requires an existing R2 `vm/manifest.json` baseline; bootstrap first releases with `--slice full`.
Publish workflows can update only one manifest slice at a time. Sliced publishing requires an existing R2 `vm/manifest.json` baseline; bootstrap first releases with `--slice full`.
```bash
bun run --filter @browseros/build-tools emit-manifest -- --slice vm --merge-from https://cdn.browseros.com/vm/manifest.json
bun run --filter @browseros/build-tools emit-manifest -- --slice agents:openclaw --merge-from https://cdn.browseros.com/vm/manifest.json
```
@@ -67,13 +54,4 @@ bun run --filter @browseros/build-tools emit-manifest -- --slice agents:openclaw
NODE_ENV=development bun run --filter @browseros/build-tools cache:sync
```
Pulls the published manifest and tarballs from R2 (`cdn.browseros.com/vm/`). Development cache files land under `~/.browseros-dev/cache/vm/images/`. Production-mode cache files land under `~/.browseros/cache/vm/images/`.
## Seed the dev cache from a local build
```bash
bun run --filter @browseros/build-tools build:tarball -- --agent openclaw --arch arm64
NODE_ENV=development bun run --filter @browseros/build-tools cache:sync:dev
```
`cache:sync:dev` hardcodes `arm64` (all devs are on Apple Silicon), skips R2 entirely, and writes an arm64-only manifest + tarball into `~/.browseros-dev/cache/vm/` from `./dist/`. It refuses to run unless `NODE_ENV=development`. Use this when you want to test the server against a local tarball without publishing.
Development cache files land under `~/.browseros-dev/cache/vm/`. Production-mode cache files land under `~/.browseros/cache/vm/`.

View File

@@ -1,4 +1,5 @@
{
"vmVersion": "2026.04.22",
"agents": [
{
"name": "openclaw",

View File

@@ -5,13 +5,14 @@
"type": "module",
"description": "BrowserOS release artifact producer and dev cache sync",
"scripts": {
"build:disk": "bun run scripts/build-disk.ts",
"build:tarball": "bun run scripts/build-tarball.ts",
"emit-manifest": "bun run scripts/emit-manifest.ts",
"upload": "bun run scripts/upload-to-r2.ts",
"download": "bun run scripts/download-from-r2.ts",
"cache:sync": "bun run scripts/cache-sync.ts",
"cache:sync:dev": "bun run scripts/cache-sync-dev.ts",
"smoke:tarball": "bun run scripts/smoke-tarball.ts",
"smoke:vm": "bun run scripts/smoke-vm.ts",
"test": "bun test",
"typecheck": "tsc --noEmit"
},

View File

@@ -0,0 +1 @@
ef5acb5908f6ef1f7ffcf3a63913cdf618da3229ffa3b04e3727959e36bb9de1

View File

@@ -0,0 +1 @@
e879ce3547728da306bb0e634ee9f8309b8923b75873bf44cac161853b170f2b

View File

@@ -0,0 +1 @@
v1.2.0

View File

@@ -0,0 +1 @@
{}

View File

@@ -0,0 +1,12 @@
{
"arm64": {
"upstreamVersion": "20260413-2447",
"url": "https://cloud.debian.org/images/cloud/bookworm/20260413-2447/debian-12-genericcloud-arm64-20260413-2447.qcow2",
"sha512": "15ad6c52e255c84eb0e91001c5907b27199d8a7164d8ac172cfe9c92850dfaf606a6c3161d6af7f0fd5a5fef2aa8dcd9a23c2eb0fedbfcddb38e2bc306cba98f"
},
"x64": {
"upstreamVersion": "20260413-2447",
"url": "https://cloud.debian.org/images/cloud/bookworm/20260413-2447/debian-12-genericcloud-amd64-20260413-2447.qcow2",
"sha512": "db11b13c4efcc37828ffadae521d101e85079d349e1418074087bb7d306f11caccdc2b0b539d6fd50d623d40a898f83c6137268a048d7700397dc35b7dcbc927"
}
}

View File

@@ -0,0 +1,19 @@
# BrowserOS VM recipe — Debian 12 (bookworm) genericcloud
# Consumed by scripts/build-disk.ts. One virt-customize primitive per line.
# Ops: run-command | copy-in <src>:<dest-dir> | upload <src>:<dest-file> | write <path>:<content> | truncate <path>
# {version} and {manifest_tmp} are substituted at build time.
# --- Container runtime: podman stack, socket-activated so no daemon runs
# --- until the host connects.
run-command apt-get update
run-command DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends podman crun fuse-overlayfs slirp4netns ca-certificates
run-command systemctl enable podman.socket
# Registry auth baked into the image.
# NOTE(review): confirm auth.json carries no long-lived secrets — it ships
# inside every published disk.
copy-in auth.json:/etc/containers/
# --- Default user (uid 1000) with passwordless sudo via a drop-in file.
run-command useradd --create-home --uid 1000 --shell /bin/bash browseros
run-command usermod -aG sudo browseros
copy-in sudoers-browseros:/etc/sudoers.d/
run-command chmod 0440 /etc/sudoers.d/sudoers-browseros
# --- Version/manifest stamps read by host tooling at runtime.
write /etc/browseros-vm-version:{version}
upload {manifest_tmp}:/etc/browseros-vm-manifest.json
# --- Image hygiene: drop apt caches and per-instance identity so clones
# --- regenerate SSH host keys and machine-id on first boot.
run-command apt-get clean
run-command rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
run-command rm -f /etc/ssh/ssh_host_*
truncate /etc/machine-id

View File

@@ -0,0 +1 @@
browseros ALL=(ALL) NOPASSWD: ALL

View File

@@ -0,0 +1,216 @@
#!/usr/bin/env bun
import { createHash } from 'node:crypto'
import { createReadStream } from 'node:fs'
import {
copyFile,
mkdir,
readFile,
rm,
stat,
writeFile,
} from 'node:fs/promises'
import path from 'node:path'
import { parseArgs } from 'node:util'
import { $ } from 'bun'
import { type Arch, parseArch } from './common/arch'
import { fetchWithTimeout } from './common/fetch'
import { qcow2Key } from './common/manifest'
import { sha256File } from './common/sha256'
type ChunkSink = ReturnType<ReturnType<typeof Bun.file>['writer']>
// CLI: --version and --arch are required; --output-dir defaults to ./dist.
const { values } = parseArgs({
args: Bun.argv.slice(2),
options: {
version: { type: 'string' },
arch: { type: 'string' },
'output-dir': { type: 'string', default: './dist' },
},
})
if (!values.version || !values.arch) {
console.error(
'usage: build:disk -- --version <YYYY.MM.DD[-N]> --arch <arm64|x64> [--output-dir ./dist]',
)
process.exit(1)
}
const arch = parseArch(values.arch)
const version = values.version
const outDir = values['output-dir']
// Package root is one level above scripts/ (this file's directory).
const pkgRoot = path.resolve(import.meta.dir, '..')
await mkdir(outDir, { recursive: true })
// Pinned upstream Debian cloud images, keyed by arch, with sha512 digests.
const baseImages = JSON.parse(
await readFile(path.join(pkgRoot, 'recipe/base-images.json'), 'utf8'),
) as Record<Arch, { upstreamVersion: string; url: string; sha512: string }>
const base = baseImages[arch]
if (!base) throw new Error(`missing base image for arch ${arch}`)
// Intermediate paths: downloaded base, mutable work copy, build marker,
// and the final raw/compressed outputs. Only the .zst (and its sidecar)
// survive the finally-block cleanup.
const basePath = path.join(outDir, `base-${arch}.qcow2`)
const workPath = path.join(outDir, `work-${version}-${arch}.qcow2`)
const buildMarkerPath = path.join(outDir, `build-marker-${arch}.json`)
const recipePath = path.join(pkgRoot, 'recipe/browseros-vm.recipe')
const rawOut = path.join(outDir, `browseros-vm-${version}-${arch}.qcow2`)
const zstOut = `${rawOut}.zst`
try {
// Fetch + verify the upstream image, then customize a copy of it.
await download(base.url, basePath)
await verifySha512(basePath, base.sha512)
await copyFile(basePath, workPath)
// Marker file is uploaded into the guest by the recipe ({manifest_tmp}).
await writeFile(
buildMarkerPath,
`${JSON.stringify({ name: 'browseros-vm', version, arch, phase: 'build' }, null, 2)}\n`,
)
const recipeText = await readFile(recipePath, 'utf8')
const args = composeVirtCustomizeArgs({
diskPath: workPath,
recipeText,
recipeDir: path.dirname(recipePath),
substitutions: { version, manifest_tmp: buildMarkerPath },
})
await spawnChecked(['virt-customize', ...args])
// Shrink, re-compress as qcow2, then zstd-compress for distribution.
await $`virt-sparsify --in-place ${workPath}`.quiet()
await $`qemu-img convert -O qcow2 -c ${workPath} ${rawOut}`.quiet()
await $`zstd -19 --long=30 -T0 -f -o ${zstOut} ${rawOut}`.quiet()
const sha = await sha256File(zstOut)
const size = (await stat(zstOut)).size
// NOTE(review): GNU `sha256sum --check` expects two spaces between hash
// and filename — confirm this sidecar's single-space format against its
// consumers.
await writeFile(`${zstOut}.sha256`, `${sha} ${path.basename(zstOut)}\n`)
// Emit a machine-readable summary for the caller/CI logs.
console.log(
JSON.stringify(
{
key: qcow2Key(version, arch),
path: zstOut,
sha256: sha,
sizeBytes: size,
},
null,
2,
),
)
} finally {
// Always remove intermediates, even on failure, so reruns start clean.
await rm(workPath, { force: true })
await rm(basePath, { force: true })
await rm(rawOut, { force: true })
await rm(buildMarkerPath, { force: true })
}
/**
 * Translate the recipe DSL into a virt-customize argv tail.
 *
 * Blank lines and `#` comments are skipped. `copy-in` and `upload` take a
 * `<src>:<dest>` payload whose source is resolved relative to the recipe
 * directory; `run-command`, `write`, and `truncate` pass their payload
 * through unchanged. Placeholder substitution is applied to every payload.
 * Throws on malformed lines or unknown ops.
 */
function composeVirtCustomizeArgs(opts: {
  diskPath: string
  recipeText: string
  recipeDir: string
  substitutions: Record<string, string>
}): string[] {
  // Ops with a <src>:<dest> payload; src resolves against the recipe dir.
  const FILE_OPS: Record<string, string> = {
    'copy-in': '--copy-in',
    upload: '--upload',
  }
  // Ops whose payload maps 1:1 onto a virt-customize flag value.
  const PASSTHROUGH_OPS: Record<string, string> = {
    'run-command': '--run-command',
    write: '--write',
    truncate: '--truncate',
  }
  const out = ['-a', opts.diskPath, '--network']
  for (const rawLine of opts.recipeText.split('\n')) {
    const line = rawLine.trim()
    if (!line || line.startsWith('#')) continue
    const spaceAt = line.indexOf(' ')
    if (spaceAt === -1) throw new Error(`invalid recipe line: ${line}`)
    const op = line.slice(0, spaceAt)
    const rest = subst(line.slice(spaceAt + 1), opts.substitutions)
    const fileFlag = FILE_OPS[op]
    if (fileFlag) {
      // Split on the FIRST colon only: dest paths may not contain one,
      // and src is a relative file name from the recipe dir.
      const colonAt = rest.indexOf(':')
      if (colonAt === -1) throw new Error(`invalid ${op} line: ${line}`)
      const source = rest.slice(0, colonAt)
      const target = rest.slice(colonAt + 1)
      out.push(fileFlag, `${path.resolve(opts.recipeDir, source)}:${target}`)
      continue
    }
    const plainFlag = PASSTHROUGH_OPS[op]
    if (plainFlag) {
      out.push(plainFlag, rest)
      continue
    }
    throw new Error(`unknown recipe op: ${op}`)
  }
  return out
}
/**
 * Replace each `{key}` placeholder in `value` with `vars[key]`.
 *
 * Uses an explicit undefined check so an empty-string substitution
 * (e.g. a key legitimately mapped to "") is accepted; the original
 * truthiness test would have rejected it as missing.
 * Throws when a placeholder has no entry in `vars`.
 */
function subst(value: string, vars: Record<string, string>): string {
  return value.replace(/\{(\w+)\}/g, (_match, key: string) => {
    const replacement = vars[key]
    if (replacement === undefined) {
      throw new Error(`no substitution for {${key}}`)
    }
    return replacement
  })
}
/**
 * Stream a remote file to `dest` without buffering it fully in memory.
 * Throws when the response is not OK or has no body; the file sink is
 * always closed, even if the transfer fails partway.
 */
async function download(url: string, dest: string): Promise<void> {
  const response = await fetchWithTimeout(url)
  if (!response.ok || !response.body) {
    throw new Error(`download failed: ${url} (${response.status})`)
  }
  const writer = Bun.file(dest).writer()
  const bodyReader = response.body.getReader()
  try {
    await pumpStream(bodyReader, writer)
  } finally {
    await writer.end()
  }
}
/**
 * Stream `filePath` through SHA-512 and compare against the pinned hex
 * digest. Throws with both digests on mismatch; resolves silently on match.
 */
async function verifySha512(filePath: string, expected: string): Promise<void> {
  const digest = createHash('sha512')
  for await (const chunk of createReadStream(filePath)) {
    digest.update(chunk)
  }
  const actual = digest.digest('hex')
  if (actual === expected) return
  throw new Error(
    `sha512 mismatch for ${filePath}: expected ${expected}, got ${actual}`,
  )
}
/**
 * Run an external command with inherited stdio and fail loudly on a
 * non-zero exit. LIBGUESTFS_BACKEND defaults to 'direct' unless the caller
 * already set it, so libguestfs runs without a libvirt daemon.
 */
async function spawnChecked(argv: string[]): Promise<void> {
  const childEnv = {
    ...process.env,
    LIBGUESTFS_BACKEND: process.env.LIBGUESTFS_BACKEND ?? 'direct',
  }
  const child = Bun.spawn(argv, {
    stdout: 'inherit',
    stderr: 'inherit',
    env: childEnv,
  })
  const exitCode = await child.exited
  if (exitCode === 0) return
  throw new Error(`${argv[0]} exited ${exitCode}`)
}
/**
 * Drain `reader` into `sink` chunk by chunk until the stream ends.
 * Writes are not awaited — matches the sink's buffered-writer contract.
 */
async function pumpStream(
  reader: ReadableStreamDefaultReader<Uint8Array>,
  sink: ChunkSink,
): Promise<void> {
  let next = await reader.read()
  while (!next.done) {
    sink.write(next.value)
    next = await reader.read()
  }
}

View File

@@ -1,92 +0,0 @@
#!/usr/bin/env bun
import { copyFile, mkdir, readFile, stat, writeFile } from 'node:fs/promises'
import { homedir } from 'node:os'
import path from 'node:path'
import { PATHS } from '@browseros/shared/constants/paths'
import type { Arch } from './common/arch'
import {
type AgentEntry,
type AgentManifest,
type Bundle,
tarballKey,
} from './common/manifest'
import { sha256File, verifySha256 } from './common/sha256'
const ARM64: Arch = 'arm64'
if (process.env.NODE_ENV !== 'development') {
throw new Error(
'cache:sync:dev refuses to run without NODE_ENV=development — it writes to ~/.browseros-dev/cache/vm/',
)
}
const pkgRoot = path.resolve(import.meta.dir, '..')
const distDir = path.join(pkgRoot, 'dist')
const bundle = JSON.parse(
await readFile(path.join(pkgRoot, 'bundle.json'), 'utf8'),
) as Bundle
const cacheRoot = path.join(
homedir(),
PATHS.DEV_BROWSEROS_DIR_NAME,
PATHS.CACHE_DIR_NAME,
)
const imagesDir = path.join(cacheRoot, 'vm', 'images')
const manifestPath = path.join(cacheRoot, 'vm', 'manifest.json')
await mkdir(imagesDir, { recursive: true })
const agents: Record<string, AgentEntry> = {}
for (const agent of bundle.agents) {
const key = tarballKey(agent.name, agent.version, ARM64)
const srcTarball = path.join(distDir, 'images', path.basename(key))
await assertExists(srcTarball)
const sha256 = await sha256File(srcTarball)
const sizeBytes = (await stat(srcTarball)).size
const destTarball = path.join(cacheRoot, key)
if (await matchesExisting(destTarball, sha256)) {
console.log(`cache hit: ${key}`)
} else {
await mkdir(path.dirname(destTarball), { recursive: true })
await copyFile(srcTarball, destTarball)
await verifySha256(destTarball, sha256)
console.log(`seeded ${key}`)
}
agents[agent.name] = {
image: agent.image,
version: agent.version,
tarballs: { arm64: { key, sha256, sizeBytes } } as AgentEntry['tarballs'],
}
}
const manifest: AgentManifest = {
schemaVersion: 2,
updatedAt: new Date().toISOString(),
agents,
}
await writeFile(manifestPath, `${JSON.stringify(manifest, null, 2)}\n`)
console.log(`manifest written to ${manifestPath}`)
async function assertExists(filePath: string): Promise<void> {
try {
await stat(filePath)
} catch {
throw new Error(
`missing ${filePath} — run: bun run build:tarball -- --agent <name> --arch arm64`,
)
}
}
async function matchesExisting(
filePath: string,
expectedSha: string,
): Promise<boolean> {
try {
await stat(filePath)
} catch {
return false
}
return (await sha256File(filePath)) === expectedSha
}

View File

@@ -6,7 +6,7 @@ import { parseArgs } from 'node:util'
import { PATHS } from '@browseros/shared/constants/paths'
import { ARCHES, type Arch } from './common/arch'
import { fetchWithTimeout } from './common/fetch'
import type { AgentManifest, Artifact } from './common/manifest'
import type { Artifact, VmManifest } from './common/manifest'
import { verifySha256 } from './common/sha256'
type ChunkSink = ReturnType<ReturnType<typeof Bun.file>['writer']>
@@ -18,13 +18,20 @@ export interface PlanItem {
}
export function planSync(opts: {
local: AgentManifest | null
remote: AgentManifest
local: VmManifest | null
remote: VmManifest
cacheRoot: string
arches: Arch[]
}): PlanItem[] {
const out: PlanItem[] = []
for (const arch of opts.arches) {
maybeAdd(
out,
opts.remote.vmDisk[arch],
opts.local?.vmDisk[arch],
opts.cacheRoot,
)
for (const [name, agent] of Object.entries(opts.remote.agents)) {
maybeAdd(
out,
@@ -69,18 +76,20 @@ if (import.meta.main) {
`manifest fetch failed: ${manifestUrl} (${response.status})`,
)
}
const remote = (await response.json()) as AgentManifest
const remote = (await response.json()) as VmManifest
const localManifestPath = path.join(cacheRoot, 'vm', 'manifest.json')
const local = await readLocalManifest(localManifestPath)
const plan = planSync({ local, remote, cacheRoot, arches })
if (plan.length === 0) {
console.log('agent cache up to date')
console.log(`cache up to date at vmVersion ${remote.vmVersion}`)
process.exit(0)
}
console.log(`syncing ${plan.length} agent artifact(s)`)
console.log(
`syncing ${plan.length} artifact(s) for vmVersion ${remote.vmVersion}`,
)
for (const item of plan) {
await mkdir(path.dirname(item.destPath), { recursive: true })
const partial = `${item.destPath}.partial`
@@ -119,9 +128,9 @@ function getCacheDir(): string {
export async function readLocalManifest(
manifestPath: string,
): Promise<AgentManifest | null> {
): Promise<VmManifest | null> {
try {
return JSON.parse(await readFile(manifestPath, 'utf8')) as AgentManifest
return JSON.parse(await readFile(manifestPath, 'utf8')) as VmManifest
} catch (error) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') return null
throw error

View File

@@ -12,9 +12,11 @@ export interface AgentEntry {
tarballs: Record<Arch, Artifact>
}
export interface AgentManifest {
schemaVersion: 2
export interface VmManifest {
schemaVersion: 1
vmVersion: string
updatedAt: string
vmDisk: Record<Arch, Artifact>
agents: Record<string, AgentEntry>
}
@@ -25,6 +27,7 @@ export interface BundleAgent {
}
export interface Bundle {
vmVersion: string
agents: BundleAgent[]
}
@@ -34,9 +37,14 @@ export interface ArtifactInput {
}
export interface ArtifactInputs {
vmDisk: Record<Arch, ArtifactInput>
agents: Record<string, Record<Arch, ArtifactInput>>
}
export function qcow2Key(vmVersion: string, arch: Arch): string {
return `vm/browseros-vm-${vmVersion}-${arch}.qcow2.zst`
}
export function tarballKey(name: string, version: string, arch: Arch): string {
return `vm/images/${name}-${version}-${arch}.tar.gz`
}
@@ -45,7 +53,18 @@ export function buildManifest(
bundle: Bundle,
inputs: ArtifactInputs,
now: Date = new Date(),
): AgentManifest {
): VmManifest {
const vmDisk = {} as Record<Arch, Artifact>
for (const arch of ARCHES) {
const entry = inputs.vmDisk[arch]
if (!entry) throw new Error(`missing vmDisk inputs for arch ${arch}`)
vmDisk[arch] = {
key: qcow2Key(bundle.vmVersion, arch),
sha256: entry.sha256,
sizeBytes: entry.sizeBytes,
}
}
const agents: Record<string, AgentEntry> = {}
for (const agent of bundle.agents) {
const tarballs = {} as Record<Arch, Artifact>
@@ -68,8 +87,10 @@ export function buildManifest(
}
return {
schemaVersion: 2,
schemaVersion: 1,
vmVersion: bundle.vmVersion,
updatedAt: now.toISOString(),
vmDisk,
agents,
}
}

View File

@@ -2,16 +2,18 @@
import { mkdir, readFile, stat, writeFile } from 'node:fs/promises'
import path from 'node:path'
import { parseArgs } from 'node:util'
import { ARCHES } from './common/arch'
import { ARCHES, type Arch } from './common/arch'
import { fetchWithTimeout } from './common/fetch'
import {
type AgentEntry,
type AgentManifest,
type Artifact,
type ArtifactInputs,
type Bundle,
type BundleAgent,
buildManifest,
qcow2Key,
tarballKey,
type VmManifest,
} from './common/manifest'
import { sha256File } from './common/sha256'
@@ -32,10 +34,6 @@ const bundle = JSON.parse(
await readFile(path.join(pkgRoot, 'bundle.json'), 'utf8'),
) as Bundle
if (slice !== 'full' && !slice.startsWith('agents:')) {
throw new Error(`unknown slice: ${slice}`)
}
const baseline = values['merge-from']
? await loadBaseline(values['merge-from'])
: null
@@ -53,8 +51,8 @@ async function buildSlicedManifest(opts: {
bundle: Bundle
distDir: string
slice: string
baseline: AgentManifest | null
}): Promise<AgentManifest> {
baseline: VmManifest | null
}): Promise<VmManifest> {
if (opts.slice === 'full') {
return buildManifest(
opts.bundle,
@@ -66,6 +64,16 @@ async function buildSlicedManifest(opts: {
if (!baseline) throw new Error(`--slice ${opts.slice} requires --merge-from`)
const updatedAt = new Date().toISOString()
if (opts.slice === 'vm') {
return {
...baseline,
schemaVersion: 1,
vmVersion: opts.bundle.vmVersion,
updatedAt,
vmDisk: await readVmDisk(opts.bundle.vmVersion, opts.distDir),
}
}
if (opts.slice.startsWith('agents:')) {
const name = opts.slice.slice('agents:'.length)
const agent = opts.bundle.agents.find((entry) => entry.name === name)
@@ -73,7 +81,6 @@ async function buildSlicedManifest(opts: {
return {
...baseline,
schemaVersion: 2,
updatedAt,
agents: {
...baseline.agents,
@@ -103,10 +110,26 @@ async function readAllInputs(
}
return {
vmDisk: await readArtifactInputs((arch) =>
path.join(distDir, path.basename(qcow2Key(bundle.vmVersion, arch))),
),
agents,
}
}
async function readVmDisk(
vmVersion: string,
distDir: string,
): Promise<Record<Arch, Artifact>> {
const vmDisk = {} as Record<Arch, Artifact>
for (const arch of ARCHES) {
const key = qcow2Key(vmVersion, arch)
const artifactPath = path.join(distDir, path.basename(key))
vmDisk[arch] = { key, ...(await readArtifactInput(artifactPath)) }
}
return vmDisk
}
async function readAgentEntry(
agent: BundleAgent,
distDir: string,
@@ -120,6 +143,16 @@ async function readAgentEntry(
return { image: agent.image, version: agent.version, tarballs }
}
async function readArtifactInputs(
pathForArch: (arch: Arch) => string,
): Promise<Record<Arch, { sha256: string; sizeBytes: number }>> {
const out = {} as Record<Arch, { sha256: string; sizeBytes: number }>
for (const arch of ARCHES) {
out[arch] = await readArtifactInput(pathForArch(arch))
}
return out
}
async function readArtifactInput(
filePath: string,
): Promise<{ sha256: string; sizeBytes: number }> {
@@ -129,14 +162,14 @@ async function readArtifactInput(
}
}
async function loadBaseline(src: string): Promise<AgentManifest> {
async function loadBaseline(src: string): Promise<VmManifest> {
if (src.startsWith('http://') || src.startsWith('https://')) {
const response = await fetchWithTimeout(src)
if (!response.ok) {
throw new Error(`baseline fetch failed: ${src} (${response.status})`)
}
return (await response.json()) as AgentManifest
return (await response.json()) as VmManifest
}
return JSON.parse(await readFile(src, 'utf8')) as AgentManifest
return JSON.parse(await readFile(src, 'utf8')) as VmManifest
}

View File

@@ -0,0 +1,105 @@
#!/usr/bin/env bun
import { mkdtemp, rm, writeFile } from 'node:fs/promises'
import { tmpdir } from 'node:os'
import path from 'node:path'
import { parseArgs } from 'node:util'
import { $ } from 'bun'
import { type Arch, parseArch } from './common/arch'
// Name for the throwaway Lima instance; deleted in bootAndProbe's finally.
const INSTANCE_NAME = 'browseros-vm-smoke'
// Poll cadence and overall budget for the forwarded podman socket.
const SOCKET_POLL_INTERVAL_MS = 2000
const SOCKET_POLL_TIMEOUT_MS = 120_000
// Bun extends fetch() with a `unix` option for unix-domain sockets.
type BunRequestInit = RequestInit & { unix?: string }
// CLI: --qcow is required; --arch defaults to x64; --limactl lets CI point
// at a pinned binary.
const { values } = parseArgs({
args: Bun.argv.slice(2),
options: {
qcow: { type: 'string' },
arch: { type: 'string', default: 'x64' },
limactl: { type: 'string', default: 'limactl' },
},
})
if (!values.qcow) {
console.error(
'usage: smoke:vm -- --qcow <path.qcow2.zst> [--arch arm64|x64] [--limactl limactl]',
)
process.exit(1)
}
const arch = parseArch(values.arch ?? 'x64')
await bootAndProbe(values.qcow, arch, values.limactl ?? 'limactl')
console.log('vm smoke test passed')
/**
 * Decompress the built disk, boot it under Lima/QEMU, and verify that the
 * guest's podman socket answers a libpod ping. Always tears the instance
 * and scratch directory down, even on failure.
 */
async function bootAndProbe(
qcowZstPath: string,
arch: Arch,
limactl: string,
): Promise<void> {
const workDir = await mkdtemp(path.join(tmpdir(), 'browseros-vm-smoke-'))
const qcowPath = path.join(workDir, 'disk.qcow2')
const configPath = path.join(workDir, 'lima.yaml')
const sockPath = path.join(workDir, 'podman.sock')
try {
// Decompress, write a one-off Lima config, then boot non-interactively.
await $`zstd -d -f -o ${qcowPath} ${qcowZstPath}`.quiet()
await writeFile(configPath, composeLimaConfig(qcowPath, arch, sockPath))
await $`${limactl} start --name=${INSTANCE_NAME} --tty=false ${configPath}`
// The socket appears once Lima's port forward attaches to the guest.
await waitForSocket(sockPath)
await probePodmanSocket(sockPath)
} finally {
// Best-effort cleanup: stop/delete may fail if start never succeeded.
await $`${limactl} stop --force ${INSTANCE_NAME}`.quiet().nothrow()
await $`${limactl} delete --force ${INSTANCE_NAME}`.quiet().nothrow()
await rm(workDir, { recursive: true, force: true })
}
}
/**
 * Render a minimal Lima config: boot the built disk under QEMU with
 * containerd disabled, no mounts/provisioning, and forward the guest's
 * podman socket to a host unix socket so the smoke test can ping it.
 *
 * The emitted template is YAML, so list items and child keys MUST be
 * indented — as displayed, the original literal was flat, which limactl
 * would reject as an invalid template.
 */
function composeLimaConfig(
  qcowPath: string,
  arch: Arch,
  sockPath: string,
): string {
  return `vmType: qemu
images:
  - location: ${qcowPath}
    arch: ${limaArch(arch)}
containerd:
  system: false
  user: false
mounts: []
provision: []
portForwards:
  - guestSocket: /run/podman/podman.sock
    hostSocket: ${sockPath}
    proto: unix
`
}
// Map BrowserOS arch names onto Lima/QEMU architecture identifiers.
function limaArch(arch: Arch): 'aarch64' | 'x86_64' {
  if (arch === 'arm64') return 'aarch64'
  return 'x86_64'
}
/**
 * Poll until the forwarded podman socket appears on the host, failing once
 * the timeout budget is spent.
 */
async function waitForSocket(sockPath: string): Promise<void> {
  const giveUpAt = Date.now() + SOCKET_POLL_TIMEOUT_MS
  for (;;) {
    if (Date.now() >= giveUpAt) break
    if (await Bun.file(sockPath).exists()) return
    await Bun.sleep(SOCKET_POLL_INTERVAL_MS)
  }
  throw new Error(
    `podman socket did not appear within ${SOCKET_POLL_TIMEOUT_MS}ms: ${sockPath}`,
  )
}
/**
 * Hit podman's libpod ping endpoint over the forwarded unix socket and
 * require the canonical "OK" reply.
 *
 * @param sockPath host-side unix socket forwarded from the guest
 * @throws when the HTTP status is non-2xx or the body is not exactly "OK"
 */
async function probePodmanSocket(sockPath: string): Promise<void> {
  // Bun's fetch dials the unix socket when `unix` is set; the URL host ("d")
  // is a placeholder and never resolved.
  const requestInit: BunRequestInit = { unix: sockPath }
  const reply = await fetch('http://d/v4.0.0/libpod/_ping', requestInit)
  if (!reply.ok) {
    throw new Error(`podman ping failed: ${reply.status}`)
  }
  const text = await reply.text()
  const trimmed = text.trim()
  if (trimmed !== 'OK') {
    throw new Error(`podman ping body unexpected: ${trimmed}`)
  }
}

View File

@@ -1,80 +0,0 @@
# BrowserOS VM -- consumed directly by limactl, no build step.
# Based on Lima's built-in podman.yaml + _images/debian-12 templates.
# https://github.com/lima-vm/lima/tree/master/templates
minimumLimaVersion: 2.0.0
vmType: vz
cpus: 2
memory: 2GiB
disk: 10GiB
# Pinned Debian 12 genericcloud -- matches the deprecated disk pipeline pin.
# Bump in lockstep with upstream when provisioning changes.
images:
- location: "https://cloud.debian.org/images/cloud/bookworm/20260413-2447/debian-12-genericcloud-arm64-20260413-2447.qcow2"
arch: aarch64
digest: "sha512:15ad6c52e255c84eb0e91001c5907b27199d8a7164d8ac172cfe9c92850dfaf606a6c3161d6af7f0fd5a5fef2aa8dcd9a23c2eb0fedbfcddb38e2bc306cba98f"
- location: "https://cloud.debian.org/images/cloud/bookworm/20260413-2447/debian-12-genericcloud-amd64-20260413-2447.qcow2"
arch: x86_64
digest: "sha512:db11b13c4efcc37828ffadae521d101e85079d349e1418074087bb7d306f11caccdc2b0b539d6fd50d623d40a898f83c6137268a048d7700397dc35b7dcbc927"
# Fallbacks for when Debian rotates the dated snapshot.
- location: https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-arm64.qcow2
arch: aarch64
- location: https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-amd64.qcow2
arch: x86_64
# Host-state isolation -- matches spec D5 / D7 defaults.
mounts: []
# We run podman, not containerd.
containerd:
system: false
user: false
provision:
- mode: system
script: |
#!/bin/bash
set -eux -o pipefail
if [ -e /etc/browseros-vm-provisioned ]; then exit 0; fi
DEBIAN_FRONTEND=noninteractive apt-get update
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
podman crun fuse-overlayfs slirp4netns ca-certificates
systemctl enable --now podman.socket
# Keep Docker config out of the image pull path (spec D7).
mkdir -p /etc/containers
containers_auth=/etc/containers/auth
printf '{}\n' > "${containers_auth}.json"
# Single-tenant appliance user (spec D7).
id browseros >/dev/null 2>&1 || useradd --create-home --uid 1000 --shell /bin/bash browseros
usermod -aG sudo browseros
# Version marker consumed by the runtime (WS4).
printf 'provisioned:%s\n' "$(date -u +%Y-%m-%dT%H:%M:%SZ)" > /etc/browseros-vm-version
apt-get clean
rm -rf /var/lib/apt/lists/*
touch /etc/browseros-vm-provisioned
# Block `limactl start` until podman is ready.
probes:
- script: |
#!/bin/bash
set -eux -o pipefail
if ! timeout 60s bash -c 'until systemctl is-active podman.socket >/dev/null; do sleep 2; done'; then
echo >&2 "podman.socket not active after 60s"
exit 1
fi
hint: See /var/log/cloud-init-output.log inside the guest
# Expose guest podman socket to host for the TypeScript runtime to dial.
portForwards:
- guestSocket: "/run/podman/podman.sock"
hostSocket: "{{.Dir}}/sock/podman.sock"

View File

@@ -8,7 +8,7 @@ import {
readLocalManifest,
selectSyncArches,
} from '../scripts/cache-sync'
import type { AgentManifest } from '../scripts/common/manifest'
import type { VmManifest } from '../scripts/common/manifest'
import { sha256File } from '../scripts/common/sha256'
const openclaw = {
@@ -16,53 +16,45 @@ const openclaw = {
version: '2026.4.12',
}
const claudeCode = {
image: 'ghcr.io/anthropics/claude-code',
version: '2026.4.10',
}
function manifest(tarSha: string, includeSecondAgent = false): AgentManifest {
const agents: AgentManifest['agents'] = {
openclaw: {
...openclaw,
tarballs: {
arm64: {
key: 'vm/images/openclaw-2026.4.12-arm64.tar.gz',
sha256: `${tarSha}-arm64`,
sizeBytes: 201,
},
x64: {
key: 'vm/images/openclaw-2026.4.12-x64.tar.gz',
sha256: `${tarSha}-x64`,
sizeBytes: 202,
function manifest(
vmVersion: string,
diskSha: string,
tarSha: string,
): VmManifest {
return {
schemaVersion: 1,
vmVersion,
updatedAt: '2026-04-22T00:00:00.000Z',
vmDisk: {
arm64: {
key: `vm/browseros-vm-${vmVersion}-arm64.qcow2.zst`,
sha256: `${diskSha}-arm64`,
sizeBytes: 101,
},
x64: {
key: `vm/browseros-vm-${vmVersion}-x64.qcow2.zst`,
sha256: `${diskSha}-x64`,
sizeBytes: 102,
},
},
agents: {
openclaw: {
...openclaw,
tarballs: {
arm64: {
key: 'vm/images/openclaw-2026.4.12-arm64.tar.gz',
sha256: `${tarSha}-arm64`,
sizeBytes: 201,
},
x64: {
key: 'vm/images/openclaw-2026.4.12-x64.tar.gz',
sha256: `${tarSha}-x64`,
sizeBytes: 202,
},
},
},
},
}
if (includeSecondAgent) {
agents['claude-code'] = {
...claudeCode,
tarballs: {
arm64: {
key: 'vm/images/claude-code-2026.4.10-arm64.tar.gz',
sha256: `${tarSha}-claude-arm64`,
sizeBytes: 301,
},
x64: {
key: 'vm/images/claude-code-2026.4.10-x64.tar.gz',
sha256: `${tarSha}-claude-x64`,
sizeBytes: 302,
},
},
}
}
return {
schemaVersion: 2,
updatedAt: '2026-04-22T00:00:00.000Z',
agents,
}
}
function keys(plan: PlanItem[]): string[] {
@@ -70,33 +62,36 @@ function keys(plan: PlanItem[]): string[] {
}
describe('planSync', () => {
it('downloads every selected-arch agent artifact for a fresh cache', () => {
const remote = manifest('t1')
it('downloads every selected-arch artifact for a fresh cache', () => {
const remote = manifest('2026.04.22', 'd1', 't1')
expect(
keys(planSync({ local: null, remote, cacheRoot: '/c', arches: ['x64'] })),
).toEqual(['vm/images/openclaw-2026.4.12-x64.tar.gz'])
).toEqual([
'vm/browseros-vm-2026.04.22-x64.qcow2.zst',
'vm/images/openclaw-2026.4.12-x64.tar.gz',
])
})
it('does nothing when the local manifest matches the remote manifest', () => {
const remote = manifest('t1')
const remote = manifest('2026.04.22', 'd1', 't1')
expect(
planSync({ local: remote, remote, cacheRoot: '/c', arches: ['x64'] }),
).toEqual([])
})
it('downloads only agent artifacts whose sha256 changed', () => {
const local = manifest('old-tar')
const remote = manifest('new-tar')
it('downloads only artifacts whose sha256 changed', () => {
const local = manifest('2026.04.20', 'd-old', 't1')
const remote = manifest('2026.04.22', 'd-new', 't1')
expect(
keys(planSync({ local, remote, cacheRoot: '/c', arches: ['x64'] })),
).toEqual(['vm/images/openclaw-2026.4.12-x64.tar.gz'])
).toEqual(['vm/browseros-vm-2026.04.22-x64.qcow2.zst'])
})
it('supports syncing all release arches', () => {
const remote = manifest('t1')
const remote = manifest('2026.04.22', 'd1', 't1')
expect(
planSync({
@@ -105,7 +100,7 @@ describe('planSync', () => {
cacheRoot: '/c',
arches: ['arm64', 'x64'],
}),
).toHaveLength(2)
).toHaveLength(4)
})
it('selects host arch by default and both arches when requested', () => {
@@ -149,31 +144,43 @@ describe('emit-manifest', () => {
dir = null
})
it('rejects the retired vm slice', async () => {
it('merges a vm slice while preserving agents from the baseline', async () => {
dir = await mkdtemp(path.join(tmpdir(), 'browseros-emit-vm-'))
const distDir = path.join(dir, 'dist')
await writeVmFiles(distDir)
const result = await runEmitManifest(
[
'--slice',
'vm',
'--dist-dir',
path.join(dir, 'dist'),
'--out',
path.join(dir, 'manifest.json'),
],
false,
const baseline = manifest('2026.04.20', 'old-disk', 'old-tar')
const baselinePath = path.join(dir, 'baseline.json')
const outPath = path.join(dir, 'manifest.json')
await writeJson(baselinePath, baseline)
await runEmitManifest([
'--slice',
'vm',
'--dist-dir',
distDir,
'--merge-from',
baselinePath,
'--out',
outPath,
])
const merged = JSON.parse(await readFile(outPath, 'utf8')) as VmManifest
expect(merged.vmVersion).toBe('2026.04.22')
expect(merged.agents).toEqual(baseline.agents)
expect(merged.vmDisk.x64.sha256).toBe(
await sha256File(
path.join(distDir, 'browseros-vm-2026.04.22-x64.qcow2.zst'),
),
)
expect(result.code).toBe(1)
expect(result.stderr).toContain('unknown slice: vm')
})
it('merges an agent slice while preserving other agents from the baseline', async () => {
it('merges an agent slice while preserving vmDisk from the baseline', async () => {
dir = await mkdtemp(path.join(tmpdir(), 'browseros-emit-agent-'))
const distDir = path.join(dir, 'dist')
await writeAgentFiles(distDir)
const baseline = manifest('old-tar', true)
const baseline = manifest('2026.04.20', 'old-disk', 'old-tar')
const baselinePath = path.join(dir, 'baseline.json')
const outPath = path.join(dir, 'manifest.json')
await writeJson(baselinePath, baseline)
@@ -189,9 +196,9 @@ describe('emit-manifest', () => {
outPath,
])
const merged = JSON.parse(await readFile(outPath, 'utf8')) as AgentManifest
expect(merged.schemaVersion).toBe(2)
expect(merged.agents['claude-code']).toEqual(baseline.agents['claude-code'])
const merged = JSON.parse(await readFile(outPath, 'utf8')) as VmManifest
expect(merged.vmVersion).toBe('2026.04.20')
expect(merged.vmDisk).toEqual(baseline.vmDisk)
expect(merged.agents.openclaw.tarballs.arm64.sha256).toBe(
await sha256File(
path.join(distDir, 'images/openclaw-2026.4.12-arm64.tar.gz'),
@@ -201,13 +208,15 @@ describe('emit-manifest', () => {
it('fails slice emission without a merge baseline', async () => {
dir = await mkdtemp(path.join(tmpdir(), 'browseros-emit-fail-'))
const distDir = path.join(dir, 'dist')
await writeVmFiles(distDir)
const result = await runEmitManifest(
[
'--slice',
'agents:openclaw',
'vm',
'--dist-dir',
path.join(dir, 'dist'),
distDir,
'--out',
path.join(dir, 'out.json'),
],
@@ -215,12 +224,22 @@ describe('emit-manifest', () => {
)
expect(result.code).toBe(1)
expect(result.stderr).toContain(
'--slice agents:openclaw requires --merge-from',
)
expect(result.stderr).toContain('--slice vm requires --merge-from')
})
})
async function writeVmFiles(distDir: string): Promise<void> {
await mkdir(distDir, { recursive: true })
await writeFile(
path.join(distDir, 'browseros-vm-2026.04.22-arm64.qcow2.zst'),
'arm disk',
)
await writeFile(
path.join(distDir, 'browseros-vm-2026.04.22-x64.qcow2.zst'),
'x64 disk',
)
}
async function writeAgentFiles(distDir: string): Promise<void> {
await mkdir(path.join(distDir, 'images'), { recursive: true })
await writeFile(

View File

@@ -6,11 +6,13 @@ import {
type ArtifactInputs,
type Bundle,
buildManifest,
qcow2Key,
tarballKey,
} from '../scripts/common/manifest'
import { verifySha256 } from '../scripts/common/sha256'
const bundle: Bundle = {
vmVersion: '2026.04.22',
agents: [
{
name: 'openclaw',
@@ -21,6 +23,10 @@ const bundle: Bundle = {
}
const inputs: ArtifactInputs = {
vmDisk: {
arm64: { sha256: 'disk-arm', sizeBytes: 11 },
x64: { sha256: 'disk-x64', sizeBytes: 12 },
},
agents: {
openclaw: {
arm64: { sha256: 'tar-arm', sizeBytes: 21 },
@@ -31,24 +37,32 @@ const inputs: ArtifactInputs = {
describe('manifest helpers', () => {
it('builds release artifact keys', () => {
expect(qcow2Key('2026.04.22', 'arm64')).toBe(
'vm/browseros-vm-2026.04.22-arm64.qcow2.zst',
)
expect(tarballKey('openclaw', '2026.4.12', 'x64')).toBe(
'vm/images/openclaw-2026.4.12-x64.tar.gz',
)
})
it('builds an agents-only manifest from bundle metadata and artifact inputs', () => {
it('builds a manifest from bundle metadata and artifact inputs', () => {
const manifest = buildManifest(
bundle,
inputs,
new Date('2026-04-22T00:00:00.000Z'),
)
for (const field of ['vm' + 'Version', 'vm' + 'Disk']) {
expect(Object.hasOwn(manifest, field)).toBe(false)
}
expect(manifest).toMatchObject({
schemaVersion: 2,
schemaVersion: 1,
vmVersion: '2026.04.22',
updatedAt: '2026-04-22T00:00:00.000Z',
vmDisk: {
arm64: {
key: 'vm/browseros-vm-2026.04.22-arm64.qcow2.zst',
sha256: 'disk-arm',
sizeBytes: 11,
},
},
agents: {
openclaw: {
image: 'ghcr.io/openclaw/openclaw',
@@ -65,9 +79,17 @@ describe('manifest helpers', () => {
})
})
it('fails when required tarball inputs are missing', () => {
it('fails when required artifact inputs are missing', () => {
expect(() =>
buildManifest(bundle, {
vmDisk: { arm64: inputs.vmDisk.arm64 } as ArtifactInputs['vmDisk'],
agents: inputs.agents,
}),
).toThrow('missing vmDisk inputs for arch x64')
expect(() =>
buildManifest(bundle, {
vmDisk: inputs.vmDisk,
agents: { openclaw: { arm64: inputs.agents.openclaw.arm64 } },
} as unknown as ArtifactInputs),
).toThrow('missing tarball inputs for openclaw/x64')

View File

@@ -21,16 +21,6 @@
"os": ["macos"],
"arch": ["x64"],
"executable": true
},
{
"name": "BrowserOS VM Lima template",
"source": {
"type": "local",
"path": "packages/build-tools/template/browseros-vm.yaml"
},
"destination": "resources/vm/browseros-vm.yaml",
"os": ["macos"],
"arch": ["arm64", "x64"]
}
]
}