Compare commits

...

5 Commits

Author SHA1 Message Date
Nikhil Sonti
ae3e79bb43 fix: address PR review comments for 0416-openclaw_cli_http_redesign 2026-04-16 16:22:27 -07:00
Nikhil Sonti
d2fb785367 feat: move OpenClaw control plane to CLI and HTTP 2026-04-16 15:34:06 -07:00
Nikhil
ebd3200cfe feat(build): add arm64-only macOS release config (#728)
Introduces release.macos.arm64.yaml for single-architecture arm64
macOS release builds. Mirrors the windows/linux single-arch pattern
(configure -> compile -> sign_macos -> package_macos -> upload),
skipping the universal_build module to avoid the x64 cross-compile
and lipo merge. Reuses the sparkle_setup step and the same
notarization env vars as the universal macOS config.
2026-04-16 13:09:46 -07:00
Nikhil
4172daa130 chore: bump PATCH and OFFSET (#727) 2026-04-16 13:05:01 -07:00
Nikhil
c1b1e53a86 feat(ota): bundle full server resources tree in Sparkle payload (#726)
* feat(ota): bundle full server resources tree (server + third_party bins)

The OTA Sparkle payload now ships the complete resources/ tree the agent
build produced, not just browseros_server. Every third-party binary (bun,
ripgrep, podman, gvproxy, vfkit, krunkit, podman-mac-helper, win-sshproxy)
flows to OTA-updated installs so podman integration works for users on the
OTA channel, matching fresh Chromium-build installs.

Extract the per-binary sign table into build/common/server_binaries.py so
the Chromium-build sign path (modules/sign/) and OTA sign path (modules/ota/)
share a single source of truth. Adding a new third-party dep is now a
one-file edit that both paths pick up automatically; unknown executables
under resources/bin/ are a hard error at release time.

* fix(ota): address review comments on bundle signing flow

- Avoid double-zipping during notarization: add notarize_macos_zip for
  pre-built Sparkle bundles so notarytool submits the zip directly
  instead of re-wrapping it through ditto --keepParent (Apple's service
  does not descend into nested archives). Keep notarize_macos_binary for
  single-binary callers. Share credential setup + submit logic via
  internal helpers.
- Fail fast on unknown executables in sign_server_bundle_macos: collect
  the unknown-files list before any codesign call so a missing shared-
  table entry aborts in seconds, not after a full signing round.
- Drop dead get_entitlements_path helper (no callers remain after the
  bundle refactor).

* fix(ota): address PR review comments (greptile + claude)

- sign_server_bundle_macos filters to executables only (p.is_file() +
  not p.is_symlink() + os.access X_OK) before applying the unknown-file
  guard. Non-Mach-O files (configs, dylibs, etc.) under resources/bin/
  no longer cause misleading 'unknown executable' hard failures.
- sign_server_bundle_windows now hard-errors on a missing expected
  binary instead of silently skipping it. Symmetric with the macOS
  guard — an incomplete bundle must not publish.
- ServerOTAModule.execute() uses tempfile.TemporaryDirectory context
  managers for both the download and staging roots so they are cleaned
  up on every path, including failures.
- Per-platform sign/notarize/Sparkle-sign failures now raise RuntimeError
  instead of silently skipping the platform — a release pipeline can no
  longer omit a target while reporting success.
- Move import os and import shutil to the top of ota/sign_binary.py.
- Drop unused log_error import from ota/server.py.

* chore: bump server
2026-04-16 12:59:49 -07:00
23 changed files with 1688 additions and 626 deletions

View File

@@ -1,6 +1,6 @@
{
"name": "@browseros/server",
"version": "0.0.85",
"version": "0.0.87",
"description": "BrowserOS server",
"type": "module",
"main": "./src/index.ts",

View File

@@ -0,0 +1,126 @@
/**
* @license
* Copyright 2025 BrowserOS
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
// Receives one line of command output at a time.
type LogFn = (line: string) => void

// Minimal container-runtime surface this client needs: run a command
// inside the OpenClaw container and resolve with its exit code.
interface ContainerExecutor {
  execInContainer(command: string[], onLog?: LogFn): Promise<number>
}

// Shape of one agent entry as printed by `agents list --json`.
interface RawAgentRecord {
  id: string
  name?: string
  workspace: string
  model?: string
}

// Normalized agent record exposed to callers of this client.
export interface OpenClawAgentRecord {
  agentId: string
  name: string
  workspace: string
  model?: string
}

/**
 * Control-plane client for OpenClaw agent CRUD. Every operation shells
 * out to the CLI (`node dist/index.js ...`) inside the container, passing
 * the gateway auth token, and parses the JSON the command prints.
 */
export class OpenClawAdminClient {
  constructor(
    private readonly executor: ContainerExecutor,
    private readonly getToken: () => Promise<string>,
  ) {}

  /** Returns every agent the CLI knows about, normalized for callers. */
  async listAgents(): Promise<OpenClawAgentRecord[]> {
    const rawRecords = await this.runJsonCommand<RawAgentRecord[]>([
      'agents',
      'list',
      '--json',
    ])
    return rawRecords.map((raw) => ({
      agentId: raw.id,
      // The CLI may omit a display name; fall back to the id.
      name: raw.name ?? raw.id,
      workspace: raw.workspace,
      model: raw.model,
    }))
  }

  /**
   * Creates an agent and returns its normalized record. The add command's
   * own output is ignored; the result is re-read via `agents list` and
   * looked up by name (assumes the CLI uses the name as the agent id —
   * TODO confirm; the lookup throws if no matching entry appears).
   */
  async createAgent(input: {
    name: string
    workspace: string
    model?: string
  }): Promise<OpenClawAgentRecord> {
    const args = ['agents', 'add', input.name, '--workspace', input.workspace]
    if (input.model) {
      args.push('--model', input.model)
    }
    args.push('--non-interactive', '--json')
    await this.runCommand(args)
    const agents = await this.listAgents()
    const created = agents.find((agent) => agent.agentId === input.name)
    if (created) {
      return created
    }
    throw new Error(`Created agent ${input.name} was not found in agent list`)
  }

  /** Force-deletes the given agent. */
  async deleteAgent(agentId: string): Promise<void> {
    await this.runCommand(['agents', 'delete', agentId, '--force', '--json'])
  }

  /** Cheap health probe: succeeds iff a list command round-trips. */
  async probe(): Promise<void> {
    await this.listAgents()
  }

  // Runs a CLI command and parses its combined output as JSON.
  private async runJsonCommand<T>(args: string[]): Promise<T> {
    return parseJsonOutput<T>(await this.runCommand(args))
  }

  // Runs a CLI command and returns its trimmed output. A non-zero exit
  // throws, preferring the command's own output as the error message.
  private async runCommand(args: string[]): Promise<string> {
    const lines: string[] = []
    const token = await this.getToken()
    const exitCode = await this.executor.execInContainer(
      ['node', 'dist/index.js', ...args, '--token', token],
      (line) => {
        lines.push(line)
      },
    )
    const combined = lines.join('\n').trim()
    if (exitCode === 0) {
      return combined
    }
    throw new Error(
      combined || `OpenClaw command failed (${args.slice(0, 2).join(' ')})`,
    )
  }
}

/**
 * Parses CLI output as JSON. Tolerates leading non-JSON noise (progress
 * lines, warnings) by retrying from the first `[` or `{`.
 */
function parseJsonOutput<T>(output: string): T {
  const direct = tryParseJson<T>(output)
  if (direct !== null) {
    return direct
  }
  const firstJsonChar = output.search(/[[{]/)
  if (firstJsonChar >= 0) {
    const fromJsonChar = tryParseJson<T>(output.slice(firstJsonChar))
    if (fromJsonChar !== null) {
      return fromJsonChar
    }
  }
  throw new Error(
    `Failed to parse OpenClaw JSON output: ${output.slice(0, 200)}`,
  )
}

// Strict JSON parse that reports failure (or blank input) as null
// instead of throwing.
function tryParseJson<T>(value: string): T | null {
  const trimmed = value.trim()
  if (trimmed.length === 0) {
    return null
  }
  try {
    return JSON.parse(trimmed) as T
  } catch {
    return null
  }
}

View File

@@ -0,0 +1,245 @@
/**
* @license
* Copyright 2025 BrowserOS
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
import { createParser, type EventSourceMessage } from 'eventsource-parser'
import type { OpenClawStreamEvent } from './openclaw-types'
/** Parameters for one streamed chat request to an OpenClaw agent. */
export interface OpenClawChatRequest {
  // Target agent id; 'main' is mapped to the gateway's default model alias.
  agentId: string
  // Session identifier; folded into the OpenAI-style `user` field.
  sessionKey: string
  // The user's message, sent as a single 'user'-role chat message.
  message: string
  // Optional abort signal; passed to fetch and to the SSE event pump.
  signal?: AbortSignal
}
/**
 * HTTP chat client for the OpenClaw gateway's OpenAI-compatible
 * `/v1/chat/completions` endpoint on localhost. Requests a streamed
 * completion and exposes it as a typed event stream.
 */
export class OpenClawHttpChatClient {
  constructor(
    private readonly port: number,
    private readonly getToken: () => Promise<string>,
  ) {}

  /**
   * Sends the chat request and returns a stream of parsed SSE events.
   * Throws if the response carries no body.
   */
  async streamChat(
    input: OpenClawChatRequest,
  ): Promise<ReadableStream<OpenClawStreamEvent>> {
    const response = await this.fetchChat(input)
    const stream = response.body
    if (!stream) {
      throw new Error('OpenClaw chat response had no body')
    }
    return createEventStream(stream, input.signal)
  }

  // Issues the POST and returns the OK response; a non-OK status throws
  // with the response text (or a generic status message).
  private async fetchChat(input: OpenClawChatRequest): Promise<Response> {
    const token = await this.getToken()
    const url = `http://127.0.0.1:${this.port}/v1/chat/completions`
    const payload = {
      model: resolveAgentModel(input.agentId),
      stream: true,
      messages: [{ role: 'user', content: input.message }],
      // Encodes agent + session so the gateway can scope the conversation.
      user: `browseros:${input.agentId}:${input.sessionKey}`,
    }
    const response = await fetch(url, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${token}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(payload),
      signal: input.signal,
    })
    if (!response.ok) {
      const detail = await response.text()
      throw new Error(
        detail || `OpenClaw chat failed with status ${response.status}`,
      )
    }
    return response
  }
}
/**
 * Maps an agent id onto the gateway's model name; the 'main' agent is
 * addressed through the default alias.
 */
function resolveAgentModel(agentId: string): string {
  if (agentId === 'main') {
    return 'openclaw/default'
  }
  return `openclaw/${agentId}`
}
/**
 * Wraps the raw SSE byte stream in a typed event stream. The pump runs
 * detached in the background; its completion is deliberately not awaited.
 */
function createEventStream(
  body: ReadableStream<Uint8Array>,
  signal?: AbortSignal,
): ReadableStream<OpenClawStreamEvent> {
  return new ReadableStream<OpenClawStreamEvent>({
    start: (controller) => {
      void pumpChatEvents(body, controller, signal)
    },
  })
}
/**
 * Pumps the chat response body through the SSE parser and forwards typed
 * events into `controller` until the stream terminates.
 *
 * `done` records that a terminal event was emitted and the controller was
 * closed — by handleMessage ([DONE], finish_reason, or a parse failure) or
 * by the error/finally paths here. Once set, further input is ignored and
 * the controller is never touched again.
 *
 * Fix over the previous version: the abort path closed the controller and
 * then `finally` closed it a second time, and the error path closed it
 * without setting `done` so `finally` also double-closed. Closing an
 * already-closed ReadableStreamDefaultController throws a TypeError, which
 * escaped this detached (`void`-ed) pump as an unhandled rejection. The
 * controller is now closed exactly once, and `done` is set on every path
 * that closes it.
 */
async function pumpChatEvents(
  body: ReadableStream<Uint8Array>,
  controller: ReadableStreamDefaultController<OpenClawStreamEvent>,
  signal?: AbortSignal,
): Promise<void> {
  const reader = body.getReader()
  const decoder = new TextDecoder()
  // Full assistant text accumulated across deltas; reported with 'done'.
  let text = ''
  let done = false
  const parser = createParser({
    onEvent(message) {
      if (done) return
      // Fold this message's delta into the running text before handling it,
      // so a finish_reason carried in the same chunk reports complete text.
      const nextText = updateAccumulatedText(message, text)
      done = handleMessage(message, controller, nextText, done)
      if (!done) {
        text = nextText
      }
    },
  })
  try {
    while (true) {
      // Abort is observed between reads; an in-flight network read is
      // interrupted by the fetch-level signal, not here.
      if (signal?.aborted) {
        await reader.cancel()
        // Fall through to `finally`, which closes the controller exactly
        // once. Closing here as well would double-close and throw.
        return
      }
      const { done: streamDone, value } = await reader.read()
      if (streamDone) break
      parser.feed(decoder.decode(value, { stream: true }))
    }
  } catch (error) {
    // Surface transport/parser errors unless a terminal event already
    // went out; mark `done` so `finally` does not close a second time.
    if (!done) {
      done = true
      controller.enqueue({
        type: 'error',
        data: {
          message: error instanceof Error ? error.message : String(error),
        },
      })
      controller.close()
    }
  } finally {
    // Close exactly once when no terminal event was emitted (e.g. the
    // server ended the stream without [DONE] or a finish_reason).
    // NOTE(review): in that case consumers get no 'done' event with the
    // accumulated text — confirm they tolerate a bare close.
    if (!done) {
      done = true
      controller.close()
    }
    reader.releaseLock()
  }
}
/**
 * Handles one SSE message: emits text-delta events, finishes the stream on
 * the '[DONE]' sentinel or a finish_reason, and fails the stream on an
 * unparseable chunk. Returns the updated done flag; `text` is the
 * accumulated assistant text reported by the terminal 'done' event.
 */
function handleMessage(
  message: EventSourceMessage,
  controller: ReadableStreamDefaultController<OpenClawStreamEvent>,
  text: string,
  done: boolean,
): boolean {
  if (message.data === '[DONE]') {
    return finishStream(controller, text, done)
  }
  const chunk = parseChunk(message.data)
  if (chunk === null) {
    // Malformed chunk: emit a terminal error and stop the stream.
    controller.enqueue({
      type: 'error',
      data: { message: 'Failed to parse OpenClaw chat stream chunk' },
    })
    controller.close()
    return true
  }
  for (const event of mapChunkToEvents(chunk)) {
    controller.enqueue(event)
  }
  if (hasFinishReason(chunk)) {
    return finishStream(controller, text, done)
  }
  return false
}
/**
 * Returns `text` plus any delta content carried by this SSE message.
 * Messages that fail to parse leave the accumulator unchanged.
 */
function updateAccumulatedText(
  message: EventSourceMessage,
  text: string,
): string {
  const chunk = parseChunk(message.data)
  if (!chunk) return text
  const deltas = readChoices(chunk).map((choice) => readDeltaText(choice))
  return text + deltas.join('')
}
/**
 * Emits the terminal 'done' event carrying the accumulated text and closes
 * the controller, unless the stream was already finished. Always returns
 * true so callers can assign the result to their done flag.
 */
function finishStream(
  controller: ReadableStreamDefaultController<OpenClawStreamEvent>,
  text: string,
  done: boolean,
): boolean {
  if (done) {
    return true
  }
  controller.enqueue({ type: 'done', data: { text } })
  controller.close()
  return true
}
/** Converts one parsed chunk into zero or more text-delta events. */
function mapChunkToEvents(
  chunk: Record<string, unknown>,
): OpenClawStreamEvent[] {
  return readChoices(chunk)
    .map((choice) => readDeltaText(choice))
    .filter((delta) => delta !== '')
    .map((delta) => ({
      type: 'text-delta' as const,
      data: { text: delta },
    }))
}
/** True when any choice in the chunk carries a finish_reason. */
function hasFinishReason(chunk: Record<string, unknown>): boolean {
  for (const choice of readChoices(chunk)) {
    if (readFinishReason(choice) !== null) {
      return true
    }
  }
  return false
}
/**
 * Extracts the `choices` array from a chunk, keeping only object entries
 * and tolerating a missing or malformed field.
 */
function readChoices(
  chunk: Record<string, unknown>,
): Array<Record<string, unknown>> {
  const raw = chunk.choices
  if (!Array.isArray(raw)) {
    return []
  }
  const objects: Array<Record<string, unknown>> = []
  for (const entry of raw) {
    if (entry && typeof entry === 'object') {
      objects.push(entry as Record<string, unknown>)
    }
  }
  return objects
}
/** Returns the choice's delta content string, or '' when absent/non-string. */
function readDeltaText(choice: Record<string, unknown>): string {
  const delta = choice.delta
  if (typeof delta !== 'object' || delta === null) {
    return ''
  }
  const content = (delta as Record<string, unknown>).content
  if (typeof content === 'string') {
    return content
  }
  return ''
}
/** Returns the choice's non-empty finish_reason string, else null. */
function readFinishReason(choice: Record<string, unknown>): string | null {
  const reason = choice.finish_reason
  if (typeof reason !== 'string' || reason === '') {
    return null
  }
  return reason
}
/** Parses one SSE data payload as JSON; null signals an unparseable chunk. */
function parseChunk(data: string): Record<string, unknown> | null {
  let parsed: Record<string, unknown> | null
  try {
    parsed = JSON.parse(data) as Record<string, unknown>
  } catch {
    parsed = null
  }
  return parsed
}

View File

@@ -4,14 +4,17 @@
* SPDX-License-Identifier: AGPL-3.0-or-later
*
* Main orchestrator for OpenClaw integration.
* Container lifecycle via Podman, agent CRUD via Gateway WS RPC,
* Container lifecycle via Podman, agent CRUD via in-container CLI,
* chat via HTTP /v1/chat/completions proxy.
*/
import { existsSync } from 'node:fs'
import { mkdir, readFile, writeFile } from 'node:fs/promises'
import { join, resolve } from 'node:path'
import { OPENCLAW_GATEWAY_PORT } from '@browseros/shared/constants/openclaw'
import {
OPENCLAW_CONTAINER_HOME,
OPENCLAW_GATEWAY_PORT,
} from '@browseros/shared/constants/openclaw'
import { DEFAULT_PORTS } from '@browseros/shared/constants/ports'
import type {
BrowserOSAgentRoleId,
@@ -28,11 +31,9 @@ import {
OpenClawProtectedAgentError,
} from './errors'
import {
ensureClientIdentity,
type GatewayAgentEntry,
GatewayClient,
type OpenClawStreamEvent,
} from './gateway-client'
OpenClawAdminClient,
type OpenClawAgentRecord,
} from './openclaw-admin-client'
import {
buildBootstrapConfig,
buildEnvFile,
@@ -42,6 +43,8 @@ import {
resolveProviderKeys,
resolveProviderModel,
} from './openclaw-config'
import { OpenClawHttpChatClient } from './openclaw-http-chat-client'
import type { OpenClawStreamEvent } from './openclaw-types'
import { getPodmanRuntime } from './podman-runtime'
import {
buildRoleBootstrapFiles,
@@ -62,10 +65,12 @@ export type OpenClawControlPlaneStatus =
| 'connecting'
| 'connected'
| 'reconnecting'
// Retained for extension compatibility while the UI still branches on it.
| 'recovering'
| 'failed'
export type OpenClawGatewayRecoveryReason =
// Retained for extension compatibility while the UI still renders these reasons.
| 'transient_disconnect'
| 'signature_expired'
| 'pairing_required'
@@ -92,7 +97,7 @@ export interface OpenClawStatusResponse {
lastRecoveryReason: OpenClawGatewayRecoveryReason | null
}
export interface OpenClawAgentEntry extends GatewayAgentEntry {
export interface OpenClawAgentEntry extends OpenClawAgentRecord {
role?: BrowserOSAgentRoleSummary
}
@@ -106,7 +111,8 @@ export interface SetupInput {
export class OpenClawService {
private runtime: ContainerRuntime
private gateway: GatewayClient | null = null
private adminClient: OpenClawAdminClient
private chatClient: OpenClawHttpChatClient
private openclawDir: string
private port = OPENCLAW_GATEWAY_PORT
private token: string
@@ -115,13 +121,20 @@ export class OpenClawService {
private controlPlaneStatus: OpenClawControlPlaneStatus = 'disconnected'
private lastGatewayError: string | null = null
private lastRecoveryReason: OpenClawGatewayRecoveryReason | null = null
private gatewayReconnectPromise: Promise<void> | null = null
private stopLogTail: (() => void) | null = null
constructor(browserosServerPort?: number) {
this.openclawDir = getOpenClawDir()
this.runtime = new ContainerRuntime(getPodmanRuntime(), this.openclawDir)
this.token = crypto.randomUUID()
this.adminClient = new OpenClawAdminClient(
this.runtime,
async () => this.token,
)
this.chatClient = new OpenClawHttpChatClient(
this.port,
async () => this.token,
)
this.browserosServerPort = browserosServerPort ?? DEFAULT_PORTS.server
}
@@ -199,32 +212,26 @@ export class OpenClawService {
throw new Error(this.lastError)
}
// Generate client device identity for WS auth
logProgress('Generating client device identity...')
ensureClientIdentity(this.openclawDir)
this.controlPlaneStatus = 'connecting'
logProgress('Probing OpenClaw control plane...')
await this.runControlPlaneCall(() => this.adminClient.probe())
logProgress('Connecting to gateway...')
await this.connectGatewayResiliently(logProgress)
// Ensure main agent exists (gateway may auto-create it)
// biome-ignore lint/style/noNonNullAssertion: gateway is guaranteed connected after connectGateway()
const existingAgents = await this.gateway!.listAgents()
const existingAgents = await this.listAgents()
logger.info('Fetched existing OpenClaw agents after setup', {
count: existingAgents.length,
names: existingAgents.map((agent) => agent.name),
})
const hasMain = existingAgents.some((a) => a.agentId === 'main')
if (!hasMain) {
logProgress('Creating main agent...')
const model = resolveProviderModel(input)
// biome-ignore lint/style/noNonNullAssertion: gateway is connected
await this.gateway!.createAgent({
name: 'main',
workspace: GatewayClient.agentWorkspace('main'),
model,
})
if (existingAgents.some((agent) => agent.agentId === 'main')) {
logProgress('Main agent detected')
} else {
logProgress('Main agent already exists')
logProgress('Creating main agent...')
await this.runControlPlaneCall(() =>
this.adminClient.createAgent({
name: 'main',
workspace: this.getContainerWorkspacePath('main'),
model: resolveProviderModel(input),
}),
)
}
this.lastError = null
@@ -253,15 +260,16 @@ export class OpenClawService {
throw new Error(this.lastError)
}
logProgress('Connecting to gateway...')
await this.connectGatewayResiliently(logProgress)
this.controlPlaneStatus = 'connecting'
logProgress('Probing OpenClaw control plane...')
await this.runControlPlaneCall(() => this.adminClient.probe())
this.lastError = null
logger.info('OpenClaw gateway started', { port: this.port })
}
async stop(): Promise<void> {
logger.info('Stopping OpenClaw service', { port: this.port })
this.disconnectGateway()
this.controlPlaneStatus = 'disconnected'
this.stopGatewayLogTail()
await this.runtime.composeStop()
logger.info('OpenClaw container stopped')
@@ -273,7 +281,7 @@ export class OpenClawService {
port: this.port,
})
this.disconnectGateway()
this.controlPlaneStatus = 'reconnecting'
this.stopGatewayLogTail()
logProgress('Loading gateway auth token...')
await this.loadTokenFromEnv()
@@ -289,8 +297,8 @@ export class OpenClawService {
throw new Error(this.lastError)
}
logProgress('Connecting to gateway...')
await this.connectGatewayResiliently(logProgress)
logProgress('Probing OpenClaw control plane...')
await this.runControlPlaneCall(() => this.adminClient.probe())
this.lastError = null
logProgress('Gateway restarted successfully')
logger.info('OpenClaw gateway restarted', { port: this.port })
@@ -311,15 +319,14 @@ export class OpenClawService {
logProgress('Reloading gateway auth token...')
await this.loadTokenFromEnv()
this.disconnectGateway()
this.controlPlaneStatus = 'reconnecting'
logProgress('Reconnecting control plane...')
await this.ensureGatewayReady()
await this.runControlPlaneCall(() => this.adminClient.probe())
logProgress('Control plane connected')
}
async shutdown(): Promise<void> {
this.disconnectGateway()
this.controlPlaneStatus = 'disconnected'
this.stopGatewayLogTail()
try {
await this.runtime.composeStop()
@@ -370,12 +377,14 @@ export class OpenClawService {
: false
let agentCount = 0
if (ready && this.gateway?.isConnected) {
if (ready) {
try {
const agents = await this.gateway.listAgents()
const agents = await this.runControlPlaneCall(() =>
this.adminClient.listAgents(),
)
agentCount = agents.length
} catch {
// WS may be momentarily unavailable
// latest control plane error is captured by runControlPlaneCall
}
}
@@ -386,17 +395,13 @@ export class OpenClawService {
port: this.port,
agentCount,
error: this.lastError,
controlPlaneStatus: ready
? this.gateway?.isConnected
? 'connected'
: this.controlPlaneStatus
: 'disconnected',
controlPlaneStatus: ready ? this.controlPlaneStatus : 'disconnected',
lastGatewayError: this.lastGatewayError,
lastRecoveryReason: this.lastRecoveryReason,
}
}
// ── Agent Management (via WS RPC) ───────────────────────────────────
// ── Agent Management (via CLI) ──────────────────────────────────────
async createAgent(input: {
name: string
@@ -423,7 +428,7 @@ export class OpenClawService {
hasModel: !!input.modelId,
hasApiKey: !!input.apiKey,
})
await this.ensureGatewayReady()
await this.assertGatewayReady()
const configChanged = await this.mergeProviderConfigIfChanged(input)
const keysChanged =
@@ -441,19 +446,15 @@ export class OpenClawService {
}
const model = resolveProviderModel(input)
const gateway = this.gateway
if (!gateway) {
throw new Error('Gateway WS not connected')
}
let agent: GatewayAgentEntry
let agent: OpenClawAgentRecord
try {
agent = await gateway.createAgent({
name,
workspace: GatewayClient.agentWorkspace(name),
model,
})
agent = await this.runControlPlaneCall(() =>
this.adminClient.createAgent({
name,
workspace: this.getContainerWorkspacePath(name),
model,
}),
)
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
if (message.includes('already exists')) {
@@ -463,10 +464,13 @@ export class OpenClawService {
}
if (input.roleId || input.customRole) {
await this.writeRoleBootstrapFiles(
name,
input.roleId ? resolveRoleTemplate(input.roleId) : input.customRole!,
)
const role = input.roleId
? resolveRoleTemplate(input.roleId)
: input.customRole
if (!role) {
throw new Error('Role bootstrap requested without a role definition')
}
await this.writeRoleBootstrapFiles(name, role)
}
const roleSummary = input.roleId
@@ -475,7 +479,7 @@ export class OpenClawService {
? toRoleSummary(input.customRole)
: undefined
logger.info('Agent created via WS RPC', {
logger.info('Agent created via CLI', {
agentId: agent.agentId,
roleId: input.roleId,
roleSource: roleSummary?.roleSource,
@@ -493,10 +497,11 @@ export class OpenClawService {
throw new OpenClawProtectedAgentError('Cannot delete the main agent')
}
await this.ensureGatewayReady()
await this.assertGatewayReady()
try {
// biome-ignore lint/style/noNonNullAssertion: ensureGatewayReady() guarantees a connected client
await this.gateway!.deleteAgent(agentId)
await this.runControlPlaneCall(() =>
this.adminClient.deleteAgent(agentId),
)
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
if (message.includes('not found')) {
@@ -504,14 +509,15 @@ export class OpenClawService {
}
throw error
}
logger.info('Agent removed via WS RPC', { agentId })
logger.info('Agent removed via CLI', { agentId })
}
async listAgents(): Promise<OpenClawAgentEntry[]> {
await this.ensureGatewayReady()
await this.assertGatewayReady()
logger.debug('Listing OpenClaw agents')
// biome-ignore lint/style/noNonNullAssertion: ensureGatewayReady() guarantees a connected client
const agents = await this.gateway!.listAgents()
const agents = await this.runControlPlaneCall(() =>
this.adminClient.listAgents(),
)
return Promise.all(
agents.map(async (agent) => ({
...agent,
@@ -520,21 +526,26 @@ export class OpenClawService {
)
}
// ── Chat Stream (WS) ─────────────────────────────────────────────────
// ── Chat Stream (HTTP) ───────────────────────────────────────────────
async chatStream(
agentId: string,
sessionKey: string,
message: string,
): Promise<ReadableStream<OpenClawStreamEvent>> {
await this.ensureGatewayReady()
await this.assertGatewayReady()
logger.info('Starting OpenClaw chat stream', {
agentId,
sessionKey,
messageLength: message.length,
})
// biome-ignore lint/style/noNonNullAssertion: ensureGatewayReady() guarantees a connected client
return this.gateway!.chatStream(agentId, sessionKey, message)
return this.runControlPlaneCall(() =>
this.chatClient.streamChat({
agentId,
sessionKey,
message,
}),
)
}
// ── Provider Keys ────────────────────────────────────────────────────
@@ -587,7 +598,7 @@ export class OpenClawService {
}
}
await this.connectGatewayResiliently()
await this.runControlPlaneCall(() => this.adminClient.probe())
logger.info('OpenClaw gateway auto-started')
} catch (err) {
logger.warn('OpenClaw auto-start failed', {
@@ -596,260 +607,53 @@ export class OpenClawService {
}
}
private async connectGatewayResiliently(
onLog?: (msg: string) => void,
): Promise<void> {
const logProgress = this.createProgressLogger(onLog)
const existingConnection =
!!this.gateway || this.controlPlaneStatus !== 'disconnected'
this.controlPlaneStatus = existingConnection ? 'reconnecting' : 'connecting'
this.lastGatewayError = null
this.lastRecoveryReason = null
// ── Internal ─────────────────────────────────────────────────────────
private async assertGatewayReady(): Promise<void> {
const portReady = await this.runtime.isReady(this.port)
logger.debug('Checking OpenClaw gateway readiness before use', {
port: this.port,
portReady,
controlPlaneStatus: this.controlPlaneStatus,
})
if (portReady) {
return
}
this.controlPlaneStatus = 'failed'
this.lastGatewayError = 'OpenClaw gateway is not ready'
this.lastRecoveryReason = 'container_not_ready'
throw new Error('OpenClaw gateway is not ready')
}
private async runControlPlaneCall<T>(fn: () => Promise<T>): Promise<T> {
try {
logger.info('Connecting OpenClaw control plane', {
port: this.port,
status: this.controlPlaneStatus,
})
await this.connectGateway()
await this.ensureTokenLoaded()
const result = await fn()
this.controlPlaneStatus = 'connected'
this.lastGatewayError = null
this.lastRecoveryReason = null
logger.info('OpenClaw gateway control plane connected', {
port: this.port,
})
return
return result
} catch (error) {
const reason = this.classifyGatewayError(error)
const message = error instanceof Error ? error.message : String(error)
const reason = this.classifyControlPlaneError(error)
this.controlPlaneStatus = 'failed'
this.lastGatewayError = message
this.lastRecoveryReason = reason
logger.warn('OpenClaw gateway connect failed', { reason, error: message })
if (!this.isRecoverableGatewayError(reason)) {
this.controlPlaneStatus = 'failed'
throw error
}
this.controlPlaneStatus = 'recovering'
logProgress(`Recovering gateway connection: ${reason}`)
await this.performGatewayRecovery(reason, logProgress)
try {
await this.connectGateway()
this.controlPlaneStatus = 'connected'
this.lastGatewayError = null
logger.info('OpenClaw gateway control plane recovered', {
reason,
port: this.port,
})
} catch (retryError) {
const retryMessage =
retryError instanceof Error ? retryError.message : String(retryError)
this.lastGatewayError = retryMessage
this.lastRecoveryReason = this.classifyGatewayError(retryError)
this.controlPlaneStatus = 'failed'
logger.error('OpenClaw gateway recovery failed', {
reason,
error: retryMessage,
})
throw retryError
}
throw error
}
}
// ── Internal ─────────────────────────────────────────────────────────
/**
* Approves the latest pending device pair request via the openclaw CLI
* running inside the container. This is needed because the gateway requires
* Ed25519 device identity and approval before granting operator scopes.
*/
private async approvePendingDevice(
logProgress: (msg: string) => void,
): Promise<void> {
logger.info('Approving pending OpenClaw device pairing')
// List pending devices to get the request ID
const output: string[] = []
const listCode = await this.runtime.execInContainer(
[
'node',
'dist/index.js',
'devices',
'list',
'--json',
'--token',
this.token,
],
(line) => output.push(line),
)
if (listCode !== 0) {
throw new Error(`Failed to list pending devices (exit ${listCode})`)
}
const jsonStr = output.join('\n')
let data: {
pending?: Array<{ requestId: string; deviceId?: string }>
}
try {
data = JSON.parse(jsonStr)
} catch {
throw new Error(
`Failed to parse device list output: ${jsonStr.slice(0, 200)}`,
)
}
const pending = data.pending
if (!pending?.length) {
logger.warn('No pending device pair requests found')
throw new Error('No pending device pair requests to approve')
}
const clientDeviceId = await this.readClientDeviceId()
const pendingRequest =
pending.find((request) => request.deviceId === clientDeviceId) ??
pending[0]
const requestId = pendingRequest.requestId
if (clientDeviceId && pendingRequest.deviceId !== clientDeviceId) {
logger.warn('Pending device request did not match client identity', {
clientDeviceId,
approvedRequestId: requestId,
})
}
logProgress(`Approving device pair request ${requestId.slice(0, 8)}...`)
const code = await this.runtime.execInContainer([
'node',
'dist/index.js',
'devices',
'approve',
requestId,
'--token',
this.token,
'--json',
])
if (code !== 0) {
logger.warn('Device approval command exited with code', { code })
throw new Error('Failed to approve client device pairing')
}
logProgress('Client device approved')
}
private async connectGateway(): Promise<void> {
this.disconnectGateway()
logger.info('Connecting OpenClaw gateway client', {
port: this.port,
})
const gateway = new GatewayClient(this.port, this.token, this.openclawDir)
await gateway.connect()
this.gateway = gateway
}
private disconnectGateway(): void {
if (this.gateway) {
this.gateway.disconnect()
this.gateway = null
}
this.controlPlaneStatus = 'disconnected'
}
private async ensureGatewayReady(): Promise<void> {
if (this.gateway?.isConnected) {
this.controlPlaneStatus = 'connected'
return
}
const portReady = await this.runtime.isReady(this.port)
logger.info('Checking OpenClaw gateway readiness before WS use', {
port: this.port,
portReady,
hasGatewayClient: !!this.gateway,
gatewayConnected: !!this.gateway?.isConnected,
})
if (!portReady) {
this.controlPlaneStatus = 'failed'
this.lastGatewayError = 'OpenClaw gateway is not ready'
this.lastRecoveryReason = 'container_not_ready'
throw new Error('OpenClaw gateway is not ready')
}
if (this.gatewayReconnectPromise) {
await this.gatewayReconnectPromise
return
}
this.gatewayReconnectPromise = this.connectGatewayResiliently()
try {
await this.gatewayReconnectPromise
} finally {
this.gatewayReconnectPromise = null
}
}
private classifyGatewayError(error: unknown): OpenClawGatewayRecoveryReason {
private classifyControlPlaneError(
error: unknown,
): OpenClawGatewayRecoveryReason {
const message = error instanceof Error ? error.message : String(error)
if (message.includes('signature expired')) return 'signature_expired'
if (message.includes('pairing required')) return 'pairing_required'
if (message.includes('Gateway WS not connected'))
return 'transient_disconnect'
if (message.includes('Unauthorized')) return 'token_mismatch'
if (message.includes('token')) return 'token_mismatch'
if (message.includes('not ready')) return 'container_not_ready'
return 'unknown'
}
private isRecoverableGatewayError(
reason: OpenClawGatewayRecoveryReason,
): boolean {
return (
reason === 'transient_disconnect' ||
reason === 'signature_expired' ||
reason === 'pairing_required' ||
reason === 'token_mismatch'
)
}
private async performGatewayRecovery(
reason: OpenClawGatewayRecoveryReason,
logProgress: (msg: string) => void,
): Promise<void> {
switch (reason) {
case 'signature_expired': {
logProgress('Restarting gateway to resync device signature clock...')
await this.runtime.composeRestart(logProgress)
const ready = await this.runtime.waitForReady(
this.port,
READY_TIMEOUT_MS,
)
if (!ready) {
throw new Error('Gateway not ready after clock resync restart')
}
return
}
case 'pairing_required':
logProgress('Approving pending device pairing...')
await this.approvePendingDevice(logProgress)
return
case 'token_mismatch':
logProgress('Reloading gateway auth token...')
await this.loadTokenFromEnv()
return
case 'transient_disconnect':
logProgress('Retrying gateway connection...')
return
default:
throw new Error(`Unrecoverable gateway error: ${reason}`)
}
}
private async writeBootstrapConfig(
config: Record<string, unknown>,
): Promise<void> {
@@ -884,7 +688,7 @@ export class OpenClawService {
if (this.stopLogTail) return
try {
this.stopLogTail = this.runtime.tailGatewayLogs((line) => {
logger.debug(`[openclaw] ${line}`)
logger.debug(line)
})
logger.info('Streaming OpenClaw gateway logs into server log (dev mode)')
} catch (err) {
@@ -911,6 +715,12 @@ export class OpenClawService {
)
}
private getContainerWorkspacePath(agentName: string): string {
return agentName === 'main'
? `${OPENCLAW_CONTAINER_HOME}/workspace`
: `${OPENCLAW_CONTAINER_HOME}/workspace-${agentName}`
}
private async writeRoleBootstrapFiles(
agentName: string,
role: ReturnType<typeof resolveRoleTemplate> | BrowserOSCustomRoleInput,
@@ -1013,6 +823,14 @@ export class OpenClawService {
return addedNew || updatedExisting
}
private async ensureTokenLoaded(): Promise<void> {
if (!existsSync(join(this.openclawDir, '.env'))) {
return
}
await this.loadTokenFromEnv()
}
private async mergeProviderConfigIfChanged(input: {
providerType?: string
providerName?: string
@@ -1108,18 +926,6 @@ export class OpenClawService {
}
}
/**
 * Reads the persisted device id from client-identity.json.
 * Returns null when the file is missing, unreadable, not valid JSON,
 * or lacks a deviceId field — any failure is treated as "no identity".
 */
private async readClientDeviceId(): Promise<string | null> {
  try {
    const raw = await readFile(
      join(this.openclawDir, 'client-identity.json'),
      'utf-8',
    )
    const { deviceId } = JSON.parse(raw) as { deviceId?: string }
    return deviceId ?? null
  } catch {
    // Best-effort read: absence of an identity file is a normal state.
    return null
  }
}
private createProgressLogger(
onLog?: (msg: string) => void,
): (msg: string) => void {

View File

@@ -0,0 +1,18 @@
/**
* @license
* Copyright 2025 BrowserOS
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
/**
 * A single event on the OpenClaw chat stream, as surfaced to BrowserOS
 * SSE consumers. `type` discriminates the event kind; `data` carries the
 * kind-specific payload (e.g. `{ text: string }` for text-delta/done —
 * see the chat-client tests; exact payloads per kind are defined by the
 * producer, confirm there before relying on a shape).
 */
export interface OpenClawStreamEvent {
  type:
    | 'text-delta'   // incremental assistant text
    | 'thinking'
    | 'tool-start'
    | 'tool-end'
    | 'tool-output'
    | 'lifecycle'
    | 'done'         // terminal success event
    | 'error'        // terminal failure event
  // Kind-specific payload; intentionally loose so producers can evolve it.
  data: Record<string, unknown>
}

View File

@@ -0,0 +1,66 @@
/**
* @license
* Copyright 2025 BrowserOS
*/
import { afterEach, describe, expect, it, mock } from 'bun:test'
// Verifies the OpenClaw chat route keeps the BrowserOS SSE contract
// (per-event `data:` frames, terminal [DONE], session-key header) when
// proxying the service's event stream.
describe('createOpenClawRoutes', () => {
  afterEach(() => {
    // Undo mock.module() so later suites import the real service module.
    mock.restore()
  })
  it('preserves BrowserOS SSE framing and session headers for chat', async () => {
    // Import the real module first so all of its other exports can be
    // spread into the mocked module unchanged below.
    const actualOpenClawService = await import(
      '../../../src/api/services/openclaw/openclaw-service'
    )
    // Service stub: a two-event stream — one text delta, then done.
    const chatStream = mock(
      async () =>
        new ReadableStream({
          start(controller) {
            controller.enqueue({
              type: 'text-delta',
              data: { text: 'Hello' },
            })
            controller.enqueue({
              type: 'done',
              data: { text: 'Hello' },
            })
            controller.close()
          },
        }),
    )
    // NOTE: mock.module must run BEFORE the route module is imported so
    // the route picks up the stubbed getOpenClawService.
    mock.module('../../../src/api/services/openclaw/openclaw-service', () => ({
      ...actualOpenClawService,
      getOpenClawService: () =>
        ({
          chatStream,
        }) as never,
    }))
    const { createOpenClawRoutes } = await import(
      '../../../src/api/routes/openclaw'
    )
    const route = createOpenClawRoutes()
    const response = await route.request('/agents/research/chat', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        message: 'hi',
        sessionKey: 'session-123',
      }),
    })
    expect(response.status).toBe(200)
    expect(response.headers.get('Content-Type')).toContain('text/event-stream')
    // The caller's session key must round-trip back as a response header.
    expect(response.headers.get('X-Session-Key')).toBe('session-123')
    // Route extracts agent id from the path and forwards (agent, session, message).
    expect(chatStream).toHaveBeenCalledWith('research', 'session-123', 'hi')
    // Exact wire format: one `data:` frame per event, blank-line delimited,
    // closed with the [DONE] sentinel.
    expect(await response.text()).toBe(
      'data: {"type":"text-delta","data":{"text":"Hello"}}\n\n' +
        'data: {"type":"done","data":{"text":"Hello"}}\n\n' +
        'data: [DONE]\n\n',
    )
  })
})

View File

@@ -0,0 +1,127 @@
/**
* @license
* Copyright 2025 BrowserOS
*/
import { describe, expect, it, mock } from 'bun:test'
import { OPENCLAW_CONTAINER_HOME } from '@browseros/shared/constants/openclaw'
import { OpenClawAdminClient } from '../../../../src/api/services/openclaw/openclaw-admin-client'
// Exercises the CLI-backed admin client: it shells into the container
// (`node dist/index.js agents ...`), parses JSON emitted on the log
// callback, and surfaces CLI output in errors on non-zero exit.
describe('OpenClawAdminClient', () => {
  it('lists agents from JSON CLI output', async () => {
    // Stub container exec: emit one JSON agent row via onLog, exit 0.
    const execInContainer = mock(
      async (_command: string[], onLog?: (line: string) => void) => {
        onLog?.(
          JSON.stringify([
            {
              id: 'main',
              workspace: `${OPENCLAW_CONTAINER_HOME}/workspace`,
              model: 'openrouter/anthropic/claude-haiku-4-5',
            },
          ]),
        )
        return 0
      },
    )
    const client = new OpenClawAdminClient(
      { execInContainer },
      async () => 'gateway-token',
    )
    const agents = await client.listAgents()
    expect(execInContainer).toHaveBeenCalledTimes(1)
    // Exact argv is part of the contract: --json output plus the
    // gateway token resolved from the async token provider.
    expect(execInContainer.mock.calls[0]?.[0]).toEqual([
      'node',
      'dist/index.js',
      'agents',
      'list',
      '--json',
      '--token',
      'gateway-token',
    ])
    // CLI `id` maps to both agentId and name in the client's model.
    expect(agents).toEqual([
      {
        agentId: 'main',
        name: 'main',
        workspace: `${OPENCLAW_CONTAINER_HOME}/workspace`,
        model: 'openrouter/anthropic/claude-haiku-4-5',
      },
    ])
  })
  it('creates an agent non-interactively and reads it back from the agent list', async () => {
    // Call 1 is `agents add`; call 2 is the follow-up `agents list` the
    // client uses to read the created agent back (order-sensitive stub).
    let callIndex = 0
    const execInContainer = mock(
      async (command: string[], onLog?: (line: string) => void) => {
        callIndex += 1
        if (callIndex === 1) {
          expect(command).toEqual([
            'node',
            'dist/index.js',
            'agents',
            'add',
            'research',
            '--workspace',
            `${OPENCLAW_CONTAINER_HOME}/workspace-research`,
            '--model',
            'openai/gpt-5.4-mini',
            '--non-interactive',
            '--json',
            '--token',
            'gateway-token',
          ])
          return 0
        }
        onLog?.(
          JSON.stringify([
            {
              id: 'main',
              workspace: `${OPENCLAW_CONTAINER_HOME}/workspace`,
            },
            {
              id: 'research',
              workspace: `${OPENCLAW_CONTAINER_HOME}/workspace-research`,
              model: 'openai/gpt-5.4-mini',
            },
          ]),
        )
        return 0
      },
    )
    const client = new OpenClawAdminClient(
      { execInContainer },
      async () => 'gateway-token',
    )
    const agent = await client.createAgent({
      name: 'research',
      workspace: `${OPENCLAW_CONTAINER_HOME}/workspace-research`,
      model: 'openai/gpt-5.4-mini',
    })
    expect(execInContainer).toHaveBeenCalledTimes(2)
    // Returned agent comes from the list read-back, not the add output.
    expect(agent).toEqual({
      agentId: 'research',
      name: 'research',
      workspace: `${OPENCLAW_CONTAINER_HOME}/workspace-research`,
      model: 'openai/gpt-5.4-mini',
    })
  })
  it('includes CLI stderr or stdout in thrown errors', async () => {
    // Non-zero exit: whatever the CLI logged must surface in the error
    // message so operators see the real failure reason.
    const execInContainer = mock(
      async (_command: string[], onLog?: (line: string) => void) => {
        onLog?.('agent already exists')
        return 1
      },
    )
    const client = new OpenClawAdminClient(
      { execInContainer },
      async () => 'gateway-token',
    )
    await expect(client.listAgents()).rejects.toThrow('agent already exists')
  })
})

View File

@@ -0,0 +1,196 @@
/**
* @license
* Copyright 2025 BrowserOS
*/
import { afterEach, describe, expect, it, mock } from 'bun:test'
import { OpenClawHttpChatClient } from '../../../../src/api/services/openclaw/openclaw-http-chat-client'
// Exercises the OpenAI-compatible HTTP chat client: it POSTs to the
// gateway's /v1/chat/completions, maps completion deltas to BrowserOS
// stream events, and fails loudly on bad responses or malformed SSE.
describe('OpenClawHttpChatClient', () => {
  // Tests swap globalThis.fetch; restore it after each test so suites
  // running later in the same process see the real implementation.
  const originalFetch = globalThis.fetch
  afterEach(() => {
    globalThis.fetch = originalFetch
  })
  it('maps chat completion deltas into BrowserOS stream events', async () => {
    // Fake gateway: two content deltas, a stop chunk, then the [DONE]
    // sentinel — the standard OpenAI streaming wire format.
    const fetchMock = mock((_url: string | URL, _init?: RequestInit) =>
      Promise.resolve(
        new Response(
          new ReadableStream({
            start(controller) {
              const encoder = new TextEncoder()
              controller.enqueue(
                encoder.encode(
                  'data: {"choices":[{"delta":{"content":"Hello"}}]}\n\n',
                ),
              )
              controller.enqueue(
                encoder.encode(
                  'data: {"choices":[{"delta":{"content":" world"}}]}\n\n',
                ),
              )
              controller.enqueue(
                encoder.encode(
                  'data: {"choices":[{"delta":{},"finish_reason":"stop"}]}\n\n',
                ),
              )
              controller.enqueue(encoder.encode('data: [DONE]\n\n'))
              controller.close()
            },
          }),
          {
            status: 200,
            headers: { 'Content-Type': 'text/event-stream' },
          },
        ),
      ),
    )
    globalThis.fetch = fetchMock as typeof globalThis.fetch
    const client = new OpenClawHttpChatClient(
      18789,
      async () => 'gateway-token',
    )
    const stream = await client.streamChat({
      agentId: 'research',
      sessionKey: 'session-123',
      message: 'hi',
    })
    const events = await readEvents(stream)
    const call = fetchMock.mock.calls[0]
    // Loopback-only endpoint on the configured gateway port.
    expect(call?.[0]).toBe('http://127.0.0.1:18789/v1/chat/completions')
    expect(call?.[1]).toMatchObject({
      method: 'POST',
      headers: {
        Authorization: 'Bearer gateway-token',
        'Content-Type': 'application/json',
      },
    })
    // Agent routes via the model name; session identity rides in `user`.
    expect(JSON.parse(String(call?.[1]?.body))).toEqual({
      model: 'openclaw/research',
      stream: true,
      messages: [{ role: 'user', content: 'hi' }],
      user: 'browseros:research:session-123',
    })
    // Deltas pass through individually; `done` carries the accumulated text.
    expect(events).toEqual([
      { type: 'text-delta', data: { text: 'Hello' } },
      { type: 'text-delta', data: { text: ' world' } },
      { type: 'done', data: { text: 'Hello world' } },
    ])
  })
  it('uses openclaw/default for the main agent', async () => {
    // Empty stream is enough — only the request body is asserted here.
    const fetchMock = mock(() =>
      Promise.resolve(
        new Response(
          new ReadableStream({
            start(controller) {
              controller.close()
            },
          }),
          {
            status: 200,
            headers: { 'Content-Type': 'text/event-stream' },
          },
        ),
      ),
    )
    globalThis.fetch = fetchMock as typeof globalThis.fetch
    const client = new OpenClawHttpChatClient(
      18789,
      async () => 'gateway-token',
    )
    await client.streamChat({
      agentId: 'main',
      sessionKey: 'session-123',
      message: 'hi',
    })
    const body = JSON.parse(String(fetchMock.mock.calls[0]?.[1]?.body)) as {
      model: string
    }
    // 'main' is aliased to the gateway's default model slot.
    expect(body.model).toBe('openclaw/default')
  })
  it('throws on non-success HTTP responses', async () => {
    // The response body text must be included in the thrown error.
    globalThis.fetch = mock(() =>
      Promise.resolve(new Response('Unauthorized', { status: 401 })),
    ) as typeof globalThis.fetch
    const client = new OpenClawHttpChatClient(
      18789,
      async () => 'gateway-token',
    )
    await expect(
      client.streamChat({
        agentId: 'research',
        sessionKey: 'session-123',
        message: 'hi',
      }),
    ).rejects.toThrow('Unauthorized')
  })
  it('stops processing batched SSE events after a malformed chunk closes the stream', async () => {
    // Three frames arrive in ONE network chunk; the middle one is not
    // JSON. Expected: the good frame before it is emitted, then an error
    // event ends the stream — the frame after the bad one is dropped.
    const fetchMock = mock(() =>
      Promise.resolve(
        new Response(
          new ReadableStream({
            start(controller) {
              const encoder = new TextEncoder()
              controller.enqueue(
                encoder.encode(
                  'data: {"choices":[{"delta":{"content":"Hello"}}]}\n\n' +
                    'data: not-json\n\n' +
                    'data: {"choices":[{"delta":{"content":" world"}}]}\n\n',
                ),
              )
              controller.close()
            },
          }),
          {
            status: 200,
            headers: { 'Content-Type': 'text/event-stream' },
          },
        ),
      ),
    )
    globalThis.fetch = fetchMock as typeof globalThis.fetch
    const client = new OpenClawHttpChatClient(
      18789,
      async () => 'gateway-token',
    )
    const stream = await client.streamChat({
      agentId: 'research',
      sessionKey: 'session-123',
      message: 'hi',
    })
    await expect(readEvents(stream)).resolves.toEqual([
      { type: 'text-delta', data: { text: 'Hello' } },
      {
        type: 'error',
        data: { message: 'Failed to parse OpenClaw chat stream chunk' },
      },
    ])
  })
})
/**
 * Drains a BrowserOS stream-event ReadableStream into an array.
 *
 * @param stream - the event stream produced by the chat client under test
 * @returns every event emitted before the stream closed, in order
 *
 * The reader lock is released in a finally block: if `reader.read()`
 * rejects, the original code leaked the lock and left the stream
 * permanently unreadable for any later consumer.
 */
async function readEvents(
  stream: ReadableStream<{ type: string; data: Record<string, unknown> }>,
): Promise<Array<{ type: string; data: Record<string, unknown> }>> {
  const reader = stream.getReader()
  const events: Array<{ type: string; data: Record<string, unknown> }> = []
  try {
    while (true) {
      const { done, value } = await reader.read()
      if (done) break
      events.push(value)
    }
  } finally {
    reader.releaseLock()
  }
  return events
}

View File

@@ -0,0 +1,188 @@
/**
* @license
* Copyright 2025 BrowserOS
*/
import { afterEach, describe, expect, it, mock } from 'bun:test'
import { mkdtemp, readFile, rm, writeFile } from 'node:fs/promises'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
import { OPENCLAW_CONTAINER_HOME } from '@browseros/shared/constants/openclaw'
import { OpenClawService } from '../../../../src/api/services/openclaw/openclaw-service'
// Test-only view of OpenClawService that exposes its private collaborators
// (data dir, gateway token, podman runtime, admin client) so each test can
// overwrite exactly the surface it needs. Members are optional because
// different scenarios stub different subsets; only `isReady` is needed by
// every runtime stub in this suite.
type MutableOpenClawService = OpenClawService & {
  openclawDir: string  // redirected to a temp dir per test
  token: string        // in-memory gateway token
  runtime: {
    ensureReady?: () => Promise<void>
    isPodmanAvailable?: () => Promise<boolean>
    getMachineStatus?: () => Promise<{ initialized: boolean; running: boolean }>
    isReady: () => Promise<boolean>
    copyComposeFile?: (_source: string) => Promise<void>
    writeEnvFile?: (_content: string) => Promise<void>
    composePull?: () => Promise<void>
    composeUp?: () => Promise<void>
    waitForReady?: () => Promise<boolean>
  }
  adminClient: {
    probe?: ReturnType<typeof mock>
    createAgent?: ReturnType<typeof mock>
    listAgents?: ReturnType<typeof mock>
  }
}
// Exercises OpenClawService with its runtime and admin client replaced by
// stubs via the MutableOpenClawService cast, and its data dir pointed at a
// per-test temp directory on disk.
describe('OpenClawService', () => {
  let tempDir: string | null = null
  afterEach(async () => {
    mock.restore()
    // Clean up the per-test openclawDir so runs don't leak temp state.
    if (tempDir) {
      await rm(tempDir, { recursive: true, force: true })
      tempDir = null
    }
  })
  it('creates agents through the admin client and writes role bootstrap files', async () => {
    tempDir = await mkdtemp(join(tmpdir(), 'openclaw-service-'))
    const createAgent = mock(async () => ({
      agentId: 'ops',
      name: 'ops',
      workspace: `${OPENCLAW_CONTAINER_HOME}/workspace-ops`,
      model: 'openclaw/default',
    }))
    const service = new OpenClawService() as MutableOpenClawService
    service.openclawDir = tempDir
    service.runtime = {
      isReady: async () => true,
    }
    service.adminClient = {
      createAgent,
    }
    const agent = await service.createAgent({
      name: 'ops',
      roleId: 'chief-of-staff',
    })
    // The service derives the container workspace path from the agent name.
    expect(createAgent).toHaveBeenCalledWith({
      name: 'ops',
      workspace: `${OPENCLAW_CONTAINER_HOME}/workspace-ops`,
      model: undefined,
    })
    // Builtin role metadata is resolved and attached to the returned agent.
    expect(agent.role).toEqual({
      roleSource: 'builtin',
      roleId: 'chief-of-staff',
      roleName: 'Chief of Staff',
      shortDescription:
        'Executive coordination, follow-ups, scheduling, and briefing support.',
    })
    // ...and persisted on disk in the host-side workspace dir so the
    // container sees it at the agent's workspace root.
    const roleMetadata = JSON.parse(
      await readFile(
        join(tempDir, 'workspace-ops', '.browseros-role.json'),
        'utf-8',
      ),
    ) as {
      roleId: string
      agentName: string
    }
    expect(roleMetadata).toMatchObject({
      roleId: 'chief-of-staff',
      agentName: 'ops',
    })
  })
  it('maps successful admin probes into connected status', async () => {
    const service = new OpenClawService() as MutableOpenClawService
    service.runtime = {
      isPodmanAvailable: async () => true,
      getMachineStatus: async () => ({ initialized: true, running: true }),
      isReady: async () => true,
    }
    service.adminClient = {
      listAgents: mock(async () => [
        {
          agentId: 'main',
          name: 'main',
          workspace: `${OPENCLAW_CONTAINER_HOME}/workspace`,
        },
        {
          agentId: 'ops',
          name: 'ops',
          workspace: `${OPENCLAW_CONTAINER_HOME}/workspace-ops`,
        },
      ]),
    }
    const status = await service.getStatus()
    // A successful listAgents doubles as the control-plane health probe.
    expect(status).toEqual({
      status: 'running',
      podmanAvailable: true,
      machineReady: true,
      port: 18789,
      agentCount: 2,
      error: null,
      controlPlaneStatus: 'connected',
      lastGatewayError: null,
      lastRecoveryReason: null,
    })
  })
  it('creates the main agent during setup when the gateway starts without one', async () => {
    tempDir = await mkdtemp(join(tmpdir(), 'openclaw-service-'))
    const createAgent = mock(async () => ({
      agentId: 'main',
      name: 'main',
      workspace: `${OPENCLAW_CONTAINER_HOME}/workspace`,
    }))
    const service = new OpenClawService() as MutableOpenClawService
    service.openclawDir = tempDir
    // Full happy-path runtime: every compose step succeeds immediately.
    service.runtime = {
      isPodmanAvailable: async () => true,
      ensureReady: async () => {},
      isReady: async () => true,
      copyComposeFile: async () => {},
      writeEnvFile: async () => {},
      composePull: async () => {},
      composeUp: async () => {},
      waitForReady: async () => true,
    }
    service.adminClient = {
      probe: mock(async () => {}),
      listAgents: mock(async () => []),  // gateway reports no agents yet
      createAgent,
    }
    await service.setup({})
    expect(createAgent).toHaveBeenCalledWith({
      name: 'main',
      workspace: `${OPENCLAW_CONTAINER_HOME}/workspace`,
      model: undefined,
    })
  })
  it('loads the persisted gateway token before control plane calls', async () => {
    tempDir = await mkdtemp(join(tmpdir(), 'openclaw-service-'))
    await writeFile(join(tempDir, '.env'), 'OPENCLAW_GATEWAY_TOKEN=env-token\n')
    const service = new OpenClawService() as MutableOpenClawService
    service.openclawDir = tempDir
    service.token = 'random-token'  // stale in-memory token to be replaced
    service.runtime = {
      isReady: async () => true,
    }
    service.adminClient = {
      // Asserting inside the stub proves the .env token was loaded
      // BEFORE the admin client was invoked.
      listAgents: mock(async () => {
        expect(service.token).toBe('env-token')
        return []
      }),
    }
    await service.listAgents()
  })
})

View File

@@ -156,7 +156,7 @@
},
"apps/server": {
"name": "@browseros/server",
"version": "0.0.85",
"version": "0.0.87",
"bin": {
"browseros-server": "./src/index.ts",
},

View File

@@ -0,0 +1,58 @@
#!/usr/bin/env python3
"""Shared sign metadata for BrowserOS Server binaries.
Consumed by both the Chromium-build signing path (build/modules/sign/) and the
OTA release path (build/modules/ota/). Adding a new third-party binary here
means both paths pick it up automatically.
"""
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Optional
@dataclass(frozen=True)
class SignSpec:
    """Per-binary codesign metadata.

    ``entitlements`` is the filename of the plist under
    ``resources/entitlements/``; ``None`` means no extra entitlements.
    """

    # Identifier suffix for codesign (underscored variant of the binary
    # name, e.g. "podman_mac_helper") — presumably appended to the app's
    # signing identifier; confirm against modules/sign/ usage.
    identifier_suffix: str
    # Value passed to ``codesign --options`` (all entries use "runtime").
    options: str
    # Entitlements plist filename, or None when the binary needs none.
    entitlements: Optional[str] = None
# macOS sign table, keyed by file *stem* (extension stripped) of each
# executable staged under resources/bin/. Per the release design, an
# executable found there that is NOT registered here is treated as a hard
# error at release time, so every shipped binary must have an entry.
MACOS_SERVER_BINARIES: Dict[str, SignSpec] = {
    "browseros_server": SignSpec(
        "browseros_server", "runtime", "browseros-executable-entitlements.plist"
    ),
    "bun": SignSpec("bun", "runtime", "browseros-executable-entitlements.plist"),
    "rg": SignSpec("rg", "runtime"),
    "podman": SignSpec("podman", "runtime"),
    "gvproxy": SignSpec("gvproxy", "runtime"),
    # VM helpers carry their own entitlements (hypervisor access, etc.).
    "vfkit": SignSpec("vfkit", "runtime", "podman-vfkit-entitlements.plist"),
    "krunkit": SignSpec("krunkit", "runtime", "podman-krunkit-entitlements.plist"),
    # Note the key uses the on-disk hyphenated stem; the identifier suffix
    # is the underscored form.
    "podman-mac-helper": SignSpec("podman_mac_helper", "runtime"),
}

# Windows Authenticode list: POSIX-style paths relative to resources/bin/.
WINDOWS_SERVER_BINARIES: List[str] = [
    "browseros_server.exe",
    "third_party/bun.exe",
    "third_party/rg.exe",
    "third_party/podman/podman.exe",
    "third_party/podman/gvproxy.exe",
    "third_party/podman/win-sshproxy.exe",
]
def macos_sign_spec_for(binary_path: Path) -> Optional[SignSpec]:
    """Return codesign metadata for ``binary_path``, keyed by its file stem
    (e.g., ``podman-mac-helper``), or ``None`` if the binary is unregistered.
    """
    try:
        return MACOS_SERVER_BINARIES[binary_path.stem]
    except KeyError:
        return None
def expected_windows_binary_paths(server_bin_dir: Path) -> List[Path]:
    """Resolve the Windows relative-path list against a ``resources/bin`` dir."""
    resolved: List[Path] = []
    for relative in WINDOWS_SERVER_BINARIES:
        resolved.append(server_bin_dir / relative)
    return resolved

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env python3
"""Tests for the shared server-binary sign table."""
import unittest
from pathlib import Path
from .server_binaries import (
MACOS_SERVER_BINARIES,
WINDOWS_SERVER_BINARIES,
expected_windows_binary_paths,
macos_sign_spec_for,
)
# Repo-relative entitlements dir; parents[2] assumes this test module sits
# two directory levels below the tree containing resources/ — TODO confirm
# if the build layout moves.
ENTITLEMENTS_DIR = Path(__file__).resolve().parents[2] / "resources" / "entitlements"
class MacosServerBinariesTest(unittest.TestCase):
    """Sanity checks on the shared macOS sign table."""

    def test_every_entry_has_identifier_and_options(self):
        # Empty identifier/options would produce a broken codesign command.
        for stem, spec in MACOS_SERVER_BINARIES.items():
            self.assertTrue(spec.identifier_suffix, f"{stem} missing identifier_suffix")
            self.assertTrue(spec.options, f"{stem} missing options")

    def test_every_entitlements_plist_exists_on_disk(self):
        # Referencing a plist that is not checked in would fail at sign time;
        # catch it in unit tests instead.
        for stem, spec in MACOS_SERVER_BINARIES.items():
            if spec.entitlements is None:
                continue
            plist = ENTITLEMENTS_DIR / spec.entitlements
            self.assertTrue(plist.exists(), f"{stem}: entitlements {plist} missing")

    def test_macos_sign_spec_for_resolves_by_stem(self):
        # Lookup is by file stem, regardless of the directory the binary is in.
        spec = macos_sign_spec_for(Path("/x/podman-mac-helper"))
        assert spec is not None
        self.assertEqual(spec.identifier_suffix, "podman_mac_helper")
        self.assertIsNone(macos_sign_spec_for(Path("/x/not_a_known_binary")))

    def test_matches_podman_bundle_layout(self):
        # The podman machine stack must be fully covered by the sign table.
        required = {"podman", "gvproxy", "vfkit", "krunkit", "podman-mac-helper"}
        self.assertTrue(required.issubset(MACOS_SERVER_BINARIES.keys()))
class WindowsServerBinariesTest(unittest.TestCase):
    """Sanity checks on the shared Windows sign list."""

    def test_no_duplicates(self):
        self.assertEqual(
            len(WINDOWS_SERVER_BINARIES), len(set(WINDOWS_SERVER_BINARIES))
        )

    def test_paths_within_expected_layout(self):
        # Only the server exe sits at the bin root; everything else must
        # live under third_party/.
        for rel in WINDOWS_SERVER_BINARIES:
            self.assertTrue(
                rel == "browseros_server.exe" or rel.startswith("third_party/"),
                f"{rel} outside expected layout",
            )

    def test_expected_windows_binary_paths_joins_root(self):
        # Helper must join every relative entry against the given root,
        # preserving order.
        root = Path("/tmp/fake/resources/bin")
        resolved = expected_windows_binary_paths(root)
        self.assertEqual(len(resolved), len(WINDOWS_SERVER_BINARIES))
        for rel, abs_path in zip(WINDOWS_SERVER_BINARIES, resolved):
            self.assertEqual(abs_path, root / rel)
if __name__ == "__main__":
unittest.main()

View File

@@ -7,52 +7,16 @@
<language>en</language>
<item>
<sparkle:version>0.0.74</sparkle:version>
<pubDate>Thu, 12 Mar 2026 21:20:48 +0000</pubDate>
<sparkle:version>0.0.86</sparkle:version>
<pubDate>Thu, 16 Apr 2026 18:58:59 +0000</pubDate>
<!-- macOS arm64 -->
<enclosure
url="https://cdn.browseros.com/server/browseros_server_0.0.74_darwin_arm64.zip"
url="https://cdn.browseros.com/server/browseros_server_0.0.86_darwin_arm64.zip"
sparkle:os="macos"
sparkle:arch="arm64"
sparkle:edSignature="aPuQG3dtQj5v857CNSZ+Ahz3bxUOM7+tSEskW0mIbJV6969a3j1kAqOQ20D1FcxlEyYqquFOaeHpoGaDi6LsDg=="
length="22191352"
type="application/zip"/>
<!-- macOS x86_64 -->
<enclosure
url="https://cdn.browseros.com/server/browseros_server_0.0.74_darwin_x64.zip"
sparkle:os="macos"
sparkle:arch="x86_64"
sparkle:edSignature="X+FCQFH2HpBG43UiJjE0FkheyfOAUW2dhtmKn9HKRrJkqMGsaw+bhjdze1lP02oz71b8Q9AkC2NYwSUN0m0FAQ=="
length="24641802"
type="application/zip"/>
<!-- Linux arm64 -->
<enclosure
url="https://cdn.browseros.com/server/browseros_server_0.0.74_linux_arm64.zip"
sparkle:os="linux"
sparkle:arch="arm64"
sparkle:edSignature="1tnET+iFDYEc9kdwV9U3mo4rExX0JBnlJOrcEQOGBwR/478NxbOsPx3AI/H7216HlylayNj7bYLVJY/FJqY2Dg=="
length="37751728"
type="application/zip"/>
<!-- Linux x86_64 -->
<enclosure
url="https://cdn.browseros.com/server/browseros_server_0.0.74_linux_x64.zip"
sparkle:os="linux"
sparkle:arch="x86_64"
sparkle:edSignature="/OUrTZmgYWIWWWu71XAzN0B6hgs2WD9MOiZsXMvsv22TZwlEP1RdQsEO84JgFMb9if37MZX47utA2UWpSfFtAg=="
length="39041390"
type="application/zip"/>
<!-- Windows x86_64 -->
<enclosure
url="https://cdn.browseros.com/server/browseros_server_0.0.74_windows_x64.zip"
sparkle:os="windows"
sparkle:arch="x86_64"
sparkle:edSignature="qd7XYvoa59QA1bSUkaXbtBCti8DQGh3mWWfPG1qtgk5InLXJ07Y0ve/Y6ZAn8fyz6XGLEgMVhUa6eblmVuUODw=="
length="40986233"
sparkle:edSignature="kkM3dFanJr9TQgRPV7NOs7GwYpVfLHH+Db6oUWLHTWQFODBy8wx46fD6sioQdsB4k+9Ra9QCBm0WRSvKDkljDQ=="
length="101284695"
type="application/zip"/>
</item>

View File

@@ -0,0 +1,55 @@
# BrowserOS macOS Release Build Configuration (arm64 only)
#
# Single-architecture arm64 release build. Skips the universal_build
# pipeline (no x64, no lipo merge) — follows the standard per-arch flow
# like release.windows.yaml / release.linux.yaml.
#
# Environment Variables:
# Use !env tag to reference environment variables:
# Example: chromium_src: !env CHROMIUM_SRC
build:
type: release
architecture: arm64
gn_flags:
file: build/config/gn/flags.macos.release.gn
# Explicit module execution order
modules:
# Phase 1: Setup
- clean
- git_setup
- sparkle_setup
# Phase 2: Patches & Resources
- download_resources
- resources
- bundled_extensions
- chromium_replace
- string_replaces
- series_patches
- patches
# Phase 3: Build
- configure
- compile
# Phase 4: Sign & Package
- sign_macos
- package_macos
# Phase 5: Upload
- upload
# Required environment variables
# Note: CHROMIUM_SRC can be provided via --chromium-src CLI flag, YAML config, or env var
required_envs:
- MACOS_CERTIFICATE_NAME
- PROD_MACOS_NOTARIZATION_APPLE_ID
- PROD_MACOS_NOTARIZATION_TEAM_ID
- PROD_MACOS_NOTARIZATION_PWD
# Notification settings
notifications:
slack: true

View File

@@ -9,12 +9,16 @@ from .common import (
SignedArtifact,
SERVER_PLATFORMS,
APPCAST_TEMPLATE,
find_server_binary,
find_server_resources_dir,
create_server_bundle_zip,
)
from .sign_binary import (
sign_macos_binary,
notarize_macos_binary,
notarize_macos_zip,
sign_windows_binary,
sign_server_bundle_macos,
sign_server_bundle_windows,
)
from .server import ServerOTAModule
@@ -30,10 +34,14 @@ __all__ = [
"parse_existing_appcast",
"ExistingAppcast",
"SignedArtifact",
"find_server_binary",
"find_server_resources_dir",
"create_server_bundle_zip",
"sign_macos_binary",
"notarize_macos_binary",
"notarize_macos_zip",
"sign_windows_binary",
"sign_server_bundle_macos",
"sign_server_bundle_windows",
"SERVER_PLATFORMS",
"APPCAST_TEMPLATE",
]

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
"""Tests for OTA bundle-zip creation."""
import stat
import sys
import tempfile
import unittest
import zipfile
from pathlib import Path
from .common import create_server_bundle_zip, find_server_resources_dir
def _write_exec(path: Path, content: bytes) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
path.write_bytes(content)
path.chmod(path.stat().st_mode | 0o755)
class CreateServerBundleZipTest(unittest.TestCase):
    """Covers create_server_bundle_zip: full-tree archiving, exec bits,
    and failure on a missing resources dir."""

    def test_bundles_full_resources_tree(self):
        # Simulate an extracted artifact: server binary plus third-party
        # tools nested under resources/bin/.
        with tempfile.TemporaryDirectory() as tmp:
            staging = Path(tmp) / "darwin-arm64"
            resources = staging / "resources"
            _write_exec(resources / "bin" / "browseros_server", b"server")
            _write_exec(resources / "bin" / "third_party" / "bun", b"bun")
            _write_exec(resources / "bin" / "third_party" / "rg", b"rg")
            _write_exec(resources / "bin" / "third_party" / "podman" / "podman", b"pd")
            _write_exec(
                resources / "bin" / "third_party" / "podman" / "gvproxy", b"gv"
            )
            zip_path = Path(tmp) / "bundle.zip"
            self.assertTrue(create_server_bundle_zip(resources, zip_path))
            with zipfile.ZipFile(zip_path) as zf:
                names = set(zf.namelist())
            # Archive entries are rooted at "resources/" — exactly what the
            # installed app layout expects — with nothing added or dropped.
            self.assertEqual(
                names,
                {
                    "resources/bin/browseros_server",
                    "resources/bin/third_party/bun",
                    "resources/bin/third_party/rg",
                    "resources/bin/third_party/podman/podman",
                    "resources/bin/third_party/podman/gvproxy",
                },
            )

    @unittest.skipIf(sys.platform == "win32", "file mode check is meaningless on Windows")
    def test_preserves_executable_bits(self):
        # Exec bits must survive the round trip or the unpacked server
        # would not be runnable after an OTA update.
        with tempfile.TemporaryDirectory() as tmp:
            resources = Path(tmp) / "darwin-arm64" / "resources"
            _write_exec(resources / "bin" / "browseros_server", b"server")
            zip_path = Path(tmp) / "bundle.zip"
            self.assertTrue(create_server_bundle_zip(resources, zip_path))
            with zipfile.ZipFile(zip_path) as zf:
                info = zf.getinfo("resources/bin/browseros_server")
            # Unix mode lives in the high 16 bits of external_attr.
            mode = (info.external_attr >> 16) & 0o777
            self.assertTrue(mode & stat.S_IXUSR)

    def test_missing_resources_dir_fails(self):
        # The helper reports failure (returns False) rather than raising.
        with tempfile.TemporaryDirectory() as tmp:
            missing = Path(tmp) / "does-not-exist"
            zip_path = Path(tmp) / "bundle.zip"
            self.assertFalse(create_server_bundle_zip(missing, zip_path))
class FindServerResourcesDirTest(unittest.TestCase):
    """Covers find_server_resources_dir's <root>/<target>/resources lookup."""

    def test_returns_resources_dir_when_present(self):
        with tempfile.TemporaryDirectory() as tmp:
            root = Path(tmp)
            (root / "darwin-arm64" / "resources" / "bin").mkdir(parents=True)
            # The platform dict's "target" picks the per-platform subdir.
            found = find_server_resources_dir(
                root, {"name": "darwin_arm64", "target": "darwin-arm64"}
            )
            self.assertEqual(found, root / "darwin-arm64" / "resources")

    def test_returns_none_when_absent(self):
        # Missing layout yields None; the caller decides whether to fail.
        with tempfile.TemporaryDirectory() as tmp:
            root = Path(tmp)
            self.assertIsNone(
                find_server_resources_dir(
                    root, {"name": "darwin_arm64", "target": "darwin-arm64"}
                )
            )
if __name__ == "__main__":
unittest.main()

View File

@@ -1,9 +1,7 @@
#!/usr/bin/env python3
"""Common utilities for OTA update modules"""
import os
import re
import shutil
import zipfile
import xml.etree.ElementTree as ET
from datetime import datetime, timezone
@@ -13,8 +11,9 @@ from dataclasses import dataclass
from ...common.utils import log_error, log_info, log_success
# Re-export sparkle_sign_file from common module
from ...common.sparkle import sparkle_sign_file
# Re-exported so callers (and ota/__init__.py) can get sparkle_sign_file
# from ota.common alongside the other OTA helpers.
from ...common.sparkle import sparkle_sign_file as sparkle_sign_file
# Sparkle XML namespace
SPARKLE_NS = "http://www.andymatuschak.org/xml-namespaces/sparkle"
@@ -76,33 +75,15 @@ class ExistingAppcast:
artifacts: Dict[str, SignedArtifact]
def find_server_binary(binaries_dir: Path, platform: dict) -> Optional[Path]:
"""Find server binary in either flat or artifact-extracted directory structure.
def find_server_resources_dir(binaries_dir: Path, platform: dict) -> Optional[Path]:
"""Return the extracted ``resources/`` dir for a platform, or ``None``.
Supports two layouts:
Flat: {binaries_dir}/{binary_name} (e.g., browseros-server-darwin-arm64)
Artifact: {binaries_dir}/{target}/resources/bin/browseros_server[.exe]
Args:
binaries_dir: Root directory containing server binaries
platform: Platform dict from SERVER_PLATFORMS
Returns:
Path to binary if found, None otherwise
``binaries_dir`` is the temp root created by ``_download_artifacts``; each
platform lives at ``<binaries_dir>/<target>/resources/``.
"""
# Flat structure (used with --binaries pointing to mono build output)
flat_path = binaries_dir / platform["binary"]
if flat_path.exists():
return flat_path
# Artifact-extracted structure (used after download_resources)
target = platform.get("target", platform["name"].replace("_", "-"))
bin_name = "browseros_server.exe" if platform["os"] == "windows" else "browseros_server"
artifact_path = binaries_dir / target / "resources" / "bin" / bin_name
if artifact_path.exists():
return artifact_path
return None
resources = binaries_dir / target / "resources"
return resources if resources.is_dir() else None
def parse_existing_appcast(appcast_path: Path) -> Optional[ExistingAppcast]:
@@ -254,46 +235,31 @@ def generate_server_appcast(
)
def create_server_zip(
binary_path: Path,
output_zip: Path,
is_windows: bool = False,
) -> bool:
"""Create zip with proper structure: resources/bin/browseros_server
def create_server_bundle_zip(resources_dir: Path, output_zip: Path) -> bool:
"""Zip an extracted ``resources/`` tree into a Sparkle payload.
Args:
binary_path: Path to the binary to package
output_zip: Path for output zip file
is_windows: Whether this is Windows binary (affects target name)
Returns:
True on success, False on failure
Produces entries like ``resources/bin/browseros_server``,
``resources/bin/third_party/podman/podman`` — mirroring what the agent
build staged and what the Chromium build bakes into the installed app.
File modes are preserved by ``ZipFile.write`` so executable bits survive.
"""
staging_dir = output_zip.parent / f"staging_{output_zip.stem}"
if not resources_dir.is_dir():
log_error(f"Resources dir not found: {resources_dir}")
return False
bundle_root = resources_dir.parent
try:
staging_dir.mkdir(parents=True, exist_ok=True)
bin_dir = staging_dir / "resources" / "bin"
bin_dir.mkdir(parents=True, exist_ok=True)
target_name = "browseros_server.exe" if is_windows else "browseros_server"
shutil.copy2(binary_path, bin_dir / target_name)
with zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED) as zf:
for root, _, files in os.walk(staging_dir):
for file in files:
file_path = Path(root) / file
arcname = file_path.relative_to(staging_dir)
zf.write(file_path, arcname)
with zipfile.ZipFile(output_zip, "w", zipfile.ZIP_DEFLATED) as zf:
for path in sorted(resources_dir.rglob("*")):
if not path.is_file():
continue
arcname = path.relative_to(bundle_root).as_posix()
zf.write(path, arcname)
log_success(f"Created {output_zip.name}")
return True
except Exception as e:
log_error(f"Failed to create zip: {e}")
log_error(f"Failed to create bundle zip: {e}")
return False
finally:
if staging_dir.exists():
shutil.rmtree(staging_dir)
def get_appcast_path(channel: str = "alpha") -> Path:

View File

@@ -10,7 +10,6 @@ from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.utils import (
log_info,
log_error,
log_success,
log_warning,
IS_MACOS,
@@ -23,15 +22,14 @@ from .common import (
sparkle_sign_file,
generate_server_appcast,
parse_existing_appcast,
create_server_zip,
create_server_bundle_zip,
get_appcast_path,
find_server_binary,
find_server_resources_dir,
)
from .sign_binary import (
sign_macos_binary,
notarize_macos_binary,
sign_windows_binary,
get_entitlements_path,
notarize_macos_zip,
sign_server_bundle_macos,
sign_server_bundle_windows,
)
from ..storage import get_r2_client, upload_file_to_r2, download_file_from_r2
from ..storage.download import extract_artifact_zip
@@ -89,11 +87,8 @@ class ServerOTAModule(CommandModule):
return [p for p in SERVER_PLATFORMS if p["name"] in requested]
return SERVER_PLATFORMS
def _download_artifacts(self, ctx: Context) -> Path:
"""Download server artifact zips from R2 latest/ and extract them."""
download_dir = Path(tempfile.mkdtemp(prefix="ota_artifacts_"))
self._download_dir = download_dir
def _download_artifacts(self, ctx: Context, download_dir: Path) -> None:
"""Download and extract server artifact zips from R2 into ``download_dir``."""
r2_client = get_r2_client(ctx.env)
if not r2_client:
raise RuntimeError("Failed to create R2 client")
@@ -117,69 +112,85 @@ class ServerOTAModule(CommandModule):
zip_path.unlink()
log_success(f"Downloaded {len(platforms)} artifact(s)")
return download_dir
def execute(self, context: Context) -> None:
ctx = context
log_info(f"\n🚀 BrowserOS Server OTA v{self.version} ({self.channel})")
log_info("=" * 70)
# Download artifacts from R2
binaries_dir = self._download_artifacts(ctx)
with tempfile.TemporaryDirectory(prefix="ota_artifacts_") as dl, \
tempfile.TemporaryDirectory(prefix="ota_staging_") as st:
binaries_dir = Path(dl)
temp_dir = Path(st)
log_info(f"Temp directory: {temp_dir}")
platforms = self._get_platforms()
temp_dir = Path(tempfile.mkdtemp())
log_info(f"Temp directory: {temp_dir}")
self._download_artifacts(ctx, binaries_dir)
signed_artifacts = self._build_platform_artifacts(
ctx, binaries_dir, temp_dir
)
self._finalize_release(ctx, signed_artifacts)
def _build_platform_artifacts(
self, ctx: Context, binaries_dir: Path, temp_dir: Path
) -> List[SignedArtifact]:
"""Sign + zip + Sparkle-sign each platform; fail fast on any error.
Any per-platform failure raises ``RuntimeError`` so a broken
credential or unregistered binary cannot silently omit a platform
from a published release.
"""
signed_artifacts: List[SignedArtifact] = []
for platform in platforms:
for platform in self._get_platforms():
log_info(f"\n📦 Processing {platform['name']}...")
source_binary = find_server_binary(binaries_dir, platform)
if not source_binary:
log_warning(f"Binary not found for {platform['name']}, skipping")
continue
source_resources = find_server_resources_dir(binaries_dir, platform)
if not source_resources:
raise RuntimeError(
f"Resources dir not found for {platform['name']}"
)
# Copy binary to temp to preserve original
temp_binary = temp_dir / platform["binary"]
shutil.copy2(source_binary, temp_binary)
staging_resources = temp_dir / platform["name"] / "resources"
shutil.copytree(source_resources, staging_resources)
if not self._sign_binary(temp_binary, platform, ctx):
log_warning(f"Skipping {platform['name']} due to signing failure")
continue
if not self._sign_bundle(staging_resources, platform, ctx):
raise RuntimeError(f"Signing failed for {platform['name']}")
zip_name = f"browseros_server_{self.version}_{platform['name']}.zip"
zip_path = temp_dir / zip_name
is_windows = platform["os"] == "windows"
if not create_server_zip(temp_binary, zip_path, is_windows):
log_error(f"Failed to create zip for {platform['name']}")
continue
if not create_server_bundle_zip(staging_resources, zip_path):
raise RuntimeError(f"Failed to create bundle for {platform['name']}")
if platform["os"] == "macos" and IS_MACOS():
if not notarize_macos_zip(zip_path, ctx.env):
raise RuntimeError(
f"Notarization failed for {platform['name']}"
)
log_info(f"Signing {zip_name} with Sparkle...")
signature, length = sparkle_sign_file(zip_path, ctx.env)
if not signature:
log_error(f"Failed to sign zip for {platform['name']}")
continue
raise RuntimeError(f"Sparkle signing failed for {platform['name']}")
log_success(f" {platform['name']}: {length} bytes")
artifact = SignedArtifact(
signed_artifacts.append(SignedArtifact(
platform=platform["name"],
zip_path=zip_path,
signature=signature,
length=length,
os=platform["os"],
arch=platform["arch"],
)
signed_artifacts.append(artifact)
))
if not signed_artifacts:
log_error("No artifacts were processed successfully")
raise RuntimeError("OTA failed - no artifacts")
raise RuntimeError("OTA failed - no artifacts processed")
return signed_artifacts
def _finalize_release(
self, ctx: Context, signed_artifacts: List[SignedArtifact]
) -> None:
"""Write the appcast, upload every signed zip to R2, and surface URLs."""
log_info("\n📝 Generating appcast...")
appcast_path = get_appcast_path(self.channel)
existing_appcast = parse_existing_appcast(appcast_path)
@@ -219,27 +230,27 @@ class ServerOTAModule(CommandModule):
log_info(f"\nAppcast saved to: {appcast_path}")
log_info("\n📋 Next step: Run 'browseros ota server release-appcast' to make the release live")
def _sign_binary(self, binary_path: Path, platform: dict, ctx: Context) -> bool:
"""Sign binary based on platform"""
def _sign_bundle(
self, staging_resources: Path, platform: dict, ctx: Context
) -> bool:
"""Codesign every binary in the staged resources tree for a platform.
macOS notarization happens separately, on the outer Sparkle zip.
"""
os_type = platform["os"]
if os_type == "macos":
if not IS_MACOS():
log_warning(f"macOS signing requires macOS - skipping {platform['name']}")
log_warning(
f"macOS signing requires macOS - leaving {platform['name']} unsigned"
)
return True
return sign_server_bundle_macos(
staging_resources, ctx.env, ctx.get_entitlements_dir()
)
entitlements = get_entitlements_path(ctx.root_dir)
if not sign_macos_binary(binary_path, ctx.env, entitlements):
return False
log_info("Notarizing...")
return notarize_macos_binary(binary_path, ctx.env)
elif os_type == "windows":
return sign_windows_binary(binary_path, ctx.env)
elif os_type == "linux":
log_info(f"No code signing for Linux binaries")
return True
if os_type == "windows":
return sign_server_bundle_windows(staging_resources, ctx.env)
log_info("No code signing for Linux binaries")
return True

View File

@@ -1,12 +1,18 @@
#!/usr/bin/env python3
"""Platform-specific binary signing for OTA binaries"""
import os
import shutil
import subprocess
import tempfile
from pathlib import Path
from typing import Optional
from typing import List, Optional
from ...common.env import EnvConfig
from ...common.server_binaries import (
expected_windows_binary_paths,
macos_sign_spec_for,
)
from ...common.utils import (
log_info,
log_error,
@@ -21,16 +27,17 @@ def sign_macos_binary(
binary_path: Path,
env: Optional[EnvConfig] = None,
entitlements_path: Optional[Path] = None,
*,
identifier: Optional[str] = None,
options: str = "runtime",
) -> bool:
"""Sign a macOS binary with codesign
"""Sign a macOS binary with codesign.
Args:
binary_path: Path to binary to sign
env: Environment config with certificate name
entitlements_path: Optional path to entitlements plist
Returns:
True on success, False on failure
``identifier`` defaults to ``com.browseros.<stem>`` to preserve the
previous single-binary signature shape. Callers that have a shared sign
table (see ``common/server_binaries.py``) should pass identifier and
options derived from that table so OTA-signed and Chromium-build-signed
binaries share the same code identifier.
"""
if not IS_MACOS():
log_error("macOS signing requires macOS")
@@ -46,13 +53,14 @@ def sign_macos_binary(
log_info(f"Signing {binary_path.name}...")
resolved_identifier = identifier or f"com.browseros.{binary_path.stem}"
cmd = [
"codesign",
"--sign", certificate_name,
"--force",
"--timestamp",
"--identifier", f"com.browseros.{binary_path.stem}",
"--options", "runtime",
"--identifier", resolved_identifier,
"--options", options,
]
if entitlements_path and entitlements_path.exists():
@@ -91,48 +99,91 @@ def verify_macos_signature(binary_path: Path) -> bool:
return False
def _resolve_notarization_credentials(
env: Optional[EnvConfig],
) -> Optional[EnvConfig]:
if env is None:
env = EnvConfig()
missing: List[str] = []
if not env.macos_notarization_apple_id:
missing.append("PROD_MACOS_NOTARIZATION_APPLE_ID")
if not env.macos_notarization_team_id:
missing.append("PROD_MACOS_NOTARIZATION_TEAM_ID")
if not env.macos_notarization_password:
missing.append("PROD_MACOS_NOTARIZATION_PWD")
if missing:
log_error("Missing notarization credentials:")
for name in missing:
log_error(f" {name} not set")
return None
return env
def _submit_notarization(submission_path: Path, env: EnvConfig) -> bool:
assert env.macos_notarization_apple_id is not None
assert env.macos_notarization_team_id is not None
assert env.macos_notarization_password is not None
subprocess.run(
[
"xcrun", "notarytool", "store-credentials", "notarytool-profile",
"--apple-id", env.macos_notarization_apple_id,
"--team-id", env.macos_notarization_team_id,
"--password", env.macos_notarization_password,
],
capture_output=True,
text=True,
check=False,
)
log_info("Submitting for notarization (this may take a while)...")
result = subprocess.run(
[
"xcrun", "notarytool", "submit", str(submission_path),
"--keychain-profile", "notarytool-profile",
"--wait",
],
capture_output=True,
text=True,
check=False,
)
if result.returncode != 0:
log_error(f"Notarization failed: {result.stderr}")
log_error(result.stdout)
return False
if "status: Accepted" not in result.stdout:
log_error("Notarization was not accepted")
log_error(result.stdout)
return False
return True
def notarize_macos_binary(
binary_path: Path,
env: Optional[EnvConfig] = None,
) -> bool:
"""Notarize a macOS binary with Apple
"""Notarize a single macOS binary with Apple.
The binary must be zipped for notarization submission.
Args:
binary_path: Path to binary to notarize (will be zipped internally)
env: Environment config with notarization credentials
Returns:
True on success, False on failure
The binary is first wrapped in a zip via ``ditto --keepParent`` because
``notarytool`` does not accept bare executables. For an already-zipped
Sparkle bundle, call :func:`notarize_macos_zip` instead — double-wrapping
nests zips and notarytool does not descend into nested archives.
"""
if not IS_MACOS():
log_error("macOS notarization requires macOS")
return False
env = _resolve_notarization_credentials(env)
if env is None:
env = EnvConfig()
apple_id = env.macos_notarization_apple_id
team_id = env.macos_notarization_team_id
password = env.macos_notarization_password
if not all([apple_id, team_id, password]):
log_error("Missing notarization credentials:")
if not apple_id:
log_error(" PROD_MACOS_NOTARIZATION_APPLE_ID not set")
if not team_id:
log_error(" PROD_MACOS_NOTARIZATION_TEAM_ID not set")
if not password:
log_error(" PROD_MACOS_NOTARIZATION_PWD not set")
return False
log_info(f"Notarizing {binary_path.name}...")
notarize_zip = None
notarize_zip: Optional[Path] = None
try:
fd, tmp_path = tempfile.mkstemp(suffix=".zip")
import os
os.close(fd)
notarize_zip = Path(tmp_path)
@@ -146,41 +197,7 @@ def notarize_macos_binary(
log_error(f"Failed to create zip: {result.stderr}")
return False
assert apple_id is not None
assert team_id is not None
assert password is not None
subprocess.run(
[
"xcrun", "notarytool", "store-credentials", "notarytool-profile",
"--apple-id", apple_id,
"--team-id", team_id,
"--password", password,
],
capture_output=True,
text=True,
check=False,
)
log_info("Submitting for notarization (this may take a while)...")
result = subprocess.run(
[
"xcrun", "notarytool", "submit", str(notarize_zip),
"--keychain-profile", "notarytool-profile",
"--wait",
],
capture_output=True,
text=True,
check=False,
)
if result.returncode != 0:
log_error(f"Notarization failed: {result.stderr}")
log_error(result.stdout)
return False
if "status: Accepted" not in result.stdout:
log_error("Notarization was not accepted")
log_error(result.stdout)
if not _submit_notarization(notarize_zip, env):
return False
log_success(f"Notarized {binary_path.name}")
@@ -194,6 +211,33 @@ def notarize_macos_binary(
notarize_zip.unlink()
def notarize_macos_zip(zip_path: Path, env: Optional[EnvConfig] = None) -> bool:
"""Notarize a pre-built Sparkle bundle zip by submitting it directly.
``notarytool`` accepts ``.zip`` submissions and recursively scans the
Mach-O binaries inside. No extra wrapping — passing this zip through
``ditto --keepParent`` would nest zips and Apple's service would not
descend into the inner archive.
"""
if not IS_MACOS():
log_error("macOS notarization requires macOS")
return False
env = _resolve_notarization_credentials(env)
if env is None:
return False
log_info(f"Notarizing {zip_path.name}...")
try:
if not _submit_notarization(zip_path, env):
return False
log_success(f"Notarized {zip_path.name}")
return True
except Exception as e:
log_error(f"Notarization failed: {e}")
return False
def sign_windows_binary(
binary_path: Path,
env: Optional[EnvConfig] = None,
@@ -264,7 +308,6 @@ def sign_windows_binary(
signed_file = temp_output_dir / binary_path.name
if signed_file.exists():
import shutil
shutil.move(str(signed_file), str(binary_path))
try:
@@ -294,15 +337,81 @@ def sign_windows_binary(
return False
def get_entitlements_path(root_dir: Path) -> Optional[Path]:
"""Get path to server binary entitlements file"""
candidates = [
root_dir / "resources" / "entitlements" / "browseros-executable-entitlements.plist",
root_dir / "packages" / "browseros" / "resources" / "entitlements" / "browseros-executable-entitlements.plist",
def sign_server_bundle_macos(
resources_dir: Path,
env: EnvConfig,
entitlements_root: Path,
) -> bool:
"""Codesign every known binary under ``resources_dir/bin/**``.
Unknown executables are a hard error: every regular file under
``resources/bin/`` must have an entry in ``MACOS_SERVER_BINARIES``.
This prevents silently shipping an unsigned binary when a new
third-party dep is added to the agent build without being registered
in the shared sign table. The unknown-file check runs before any
codesign call so a bad release fails in seconds rather than after
several minutes of signing.
"""
bin_dir = resources_dir / "bin"
if not bin_dir.is_dir():
log_error(f"bin dir not found: {bin_dir}")
return False
# Only Mach-O-style executables need signing; any future data/config file
# shipped under resources/bin/ (plists, shell completion, etc.) is not a
# codesign target and must not trigger the unknown-binary guard.
executables = [
p
for p in sorted(bin_dir.rglob("*"))
if p.is_file() and not p.is_symlink() and os.access(p, os.X_OK)
]
unknowns = [p for p in executables if macos_sign_spec_for(p) is None]
if unknowns:
log_error(
"Unknown executables found under resources/bin/ not registered in "
"MACOS_SERVER_BINARIES (see build/common/server_binaries.py):"
)
for path in unknowns:
log_error(f" - {path.relative_to(resources_dir)}")
return False
for candidate in candidates:
if candidate.exists():
return candidate
for path in executables:
spec = macos_sign_spec_for(path)
assert spec is not None # unknowns filtered above
return None
entitlements_path: Optional[Path] = None
if spec.entitlements:
entitlements_path = entitlements_root / spec.entitlements
if not entitlements_path.exists():
log_error(
f"Missing entitlements for {path.name}: {entitlements_path}"
)
return False
if not sign_macos_binary(
path,
env,
entitlements_path,
identifier=f"com.browseros.{spec.identifier_suffix}",
options=spec.options,
):
return False
return True
def sign_server_bundle_windows(resources_dir: Path, env: EnvConfig) -> bool:
"""Sign each Windows binary enumerated in ``WINDOWS_SERVER_BINARIES``.
A missing expected binary is a hard error: publishing an incomplete
Windows bundle would ship a broken OTA update without a pipeline signal.
Symmetric with the macOS bundle's unknown-file guard.
"""
bin_dir = resources_dir / "bin"
for path in expected_windows_binary_paths(bin_dir):
if not path.exists():
log_error(f"Windows binary missing (cannot sign): {path}")
return False
if not sign_windows_binary(path, env):
return False
return True

View File

@@ -10,6 +10,7 @@ from typing import Optional, List, Dict, Tuple
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.env import EnvConfig
from ...common.server_binaries import macos_sign_spec_for
from ...common.utils import (
run_command as utils_run_command,
log_info,
@@ -20,49 +21,19 @@ from ...common.utils import (
join_paths,
)
# Central list of BrowserOS Server binaries we need to sign explicitly.
# Each entry controls identifiers, signing options, and entitlement files so
# adding a new binary is a one-line update here rather than scattered changes.
BROWSEROS_SERVER_BINARIES: Dict[str, Dict[str, str]] = {
"browseros_server": {
"identifier_suffix": "browseros_server",
"options": "runtime",
"entitlements": "browseros-executable-entitlements.plist",
},
"bun": {
"identifier_suffix": "bun",
"options": "runtime",
"entitlements": "browseros-executable-entitlements.plist",
},
"podman": {
"identifier_suffix": "podman",
"options": "runtime",
},
"gvproxy": {
"identifier_suffix": "gvproxy",
"options": "runtime",
},
"vfkit": {
"identifier_suffix": "vfkit",
"options": "runtime",
"entitlements": "podman-vfkit-entitlements.plist",
},
"krunkit": {
"identifier_suffix": "krunkit",
"options": "runtime",
"entitlements": "podman-krunkit-entitlements.plist",
},
"podman-mac-helper": {
"identifier_suffix": "podman_mac_helper",
"options": "runtime",
},
}
def get_browseros_server_binary_info(component_path: Path) -> Optional[Dict[str, str]]:
"""Return metadata for known BrowserOS Server binaries, if applicable."""
name = component_path.stem.lower()
return BROWSEROS_SERVER_BINARIES.get(name)
spec = macos_sign_spec_for(component_path)
if spec is None:
return None
info: Dict[str, str] = {
"identifier_suffix": spec.identifier_suffix,
"options": spec.options,
}
if spec.entitlements:
info["entitlements"] = spec.entitlements
return info
def run_command(

View File

@@ -7,6 +7,7 @@ from typing import List, Optional
from ...common.module import CommandModule, ValidationError
from ...common.context import Context
from ...common.env import EnvConfig
from ...common.server_binaries import expected_windows_binary_paths
from ...common.utils import (
log_info,
log_error,
@@ -16,15 +17,6 @@ from ...common.utils import (
IS_WINDOWS,
)
BROWSEROS_SERVER_BINARIES: List[str] = [
"browseros_server.exe",
"third_party/bun.exe",
"third_party/rg.exe",
"third_party/podman/podman.exe",
"third_party/podman/gvproxy.exe",
"third_party/podman/win-sshproxy.exe",
]
class WindowsSignModule(CommandModule):
produces = ["signed_installer"]
@@ -105,7 +97,7 @@ class WindowsSignModule(CommandModule):
def get_browseros_server_binary_paths(build_output_dir: Path) -> List[Path]:
"""Return absolute paths to BrowserOS Server binaries for signing."""
server_dir = build_output_dir / "BrowserOSServer" / "default" / "resources" / "bin"
return [server_dir / Path(binary) for binary in BROWSEROS_SERVER_BINARIES]
return expected_windows_binary_paths(server_dir)
def build_mini_installer(ctx: Context) -> bool:

View File

@@ -1,4 +1,4 @@
BROWSEROS_MAJOR=0
BROWSEROS_MINOR=45
BROWSEROS_BUILD=0
BROWSEROS_PATCH=1
BROWSEROS_PATCH=2