feat: add server release workflow (#627)

* feat: add server release workflow

* fix: address PR review comments for 0331-add_server_release_workflow

* refactor: rework 0331-add_server_release_workflow based on feedback

* refactor: rework 0331-add_server_release_workflow based on feedback
This commit is contained in:
Nikhil
2026-03-31 17:37:06 -07:00
committed by GitHub
parent 17be06eb2f
commit f0cbf77924
8 changed files with 344 additions and 39 deletions

152
.github/workflows/release-server.yml vendored Normal file
View File

@@ -0,0 +1,152 @@
# Manually-dispatched workflow that builds BrowserOS server binaries and
# publishes them as a tagged GitHub release.
name: Release BrowserOS Server
on:
  workflow_dispatch:
    inputs:
      version:
        description: "Release version (e.g. 0.0.80)"
        required: true
        type: string
# Serialize release runs; an in-flight release is never cancelled.
concurrency:
  group: release-server
  cancel-in-progress: false
jobs:
  release:
    # Releases may only be cut from main.
    if: github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    environment: release-core
    permissions:
      # Needed to push the release tag and create/update the GitHub release.
      contents: write
    defaults:
      run:
        working-directory: packages/browseros-agent
    steps:
      # fetch-depth: 0 — full history is required for the previous-tag lookup
      # and the commit-range changelog in "Generate release notes".
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: "1.3.6"
      # Install workspace dependencies.
      - name: Install dependencies
        run: bun ci
      # The production build reads apps/server/.env.production; seed it from
      # the checked-in example file.
      - name: Prepare production env file
        run: cp apps/server/.env.production.example apps/server/.env.production
      # Fails the run when the dispatched version does not match package.json;
      # also records the package version and HEAD sha for later steps.
      - name: Validate version
        id: version
        env:
          REQUESTED_VERSION: ${{ inputs.version }}
        run: |
          PACKAGE_VERSION=$(node -p "require('./apps/server/package.json').version")
          echo "package_version=$PACKAGE_VERSION" >> "$GITHUB_OUTPUT"
          echo "release_sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"
          if [ "$PACKAGE_VERSION" != "$REQUESTED_VERSION" ]; then
            echo "Requested version $REQUESTED_VERSION does not match apps/server/package.json ($PACKAGE_VERSION)"
            exit 1
          fi
      - name: Build release artifacts
        env:
          BROWSEROS_CONFIG_URL: ${{ secrets.BROWSEROS_CONFIG_URL }}
          CODEGEN_SERVICE_URL: ${{ secrets.CODEGEN_SERVICE_URL }}
          POSTHOG_API_KEY: ${{ secrets.POSTHOG_API_KEY }}
          SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
        run: bun run build:server:ci
      # Sanity check: the build must have produced at least one release zip.
      - name: Verify release artifacts
        run: |
          mapfile -t ZIP_FILES < <(find dist/prod/server -maxdepth 1 -type f -name 'browseros-server-resources-*.zip' | sort)
          if [ "${#ZIP_FILES[@]}" -eq 0 ]; then
            echo "No server release zip files were produced"
            exit 1
          fi
          printf 'Found release artifacts:\n%s\n' "${ZIP_FILES[@]}"
      # Writes /tmp/release-notes.md from commits since the previous
      # browseros-server-v* tag that touched the server-related paths below,
      # appending the PR number (via the GitHub API) when the commit subject
      # lacks one. Runs from the workspace root because the paths are
      # repo-relative.
      - name: Generate release notes
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          PACKAGE_VERSION: ${{ steps.version.outputs.package_version }}
        working-directory: ${{ github.workspace }}
        run: |
          SERVER_APP_PATH="packages/browseros-agent/apps/server"
          SERVER_BUILD_DIR="packages/browseros-agent/scripts/build/server"
          SERVER_BUILD_ENTRY="packages/browseros-agent/scripts/build/server.ts"
          SERVER_RESOURCE_MANIFEST="packages/browseros-agent/scripts/build/config/server-prod-resources.json"
          SERVER_WORKSPACE_PKG="packages/browseros-agent/package.json"
          CURRENT_TAG="browseros-server-v$PACKAGE_VERSION"
          PREV_TAG=$(git tag -l "browseros-server-v*" --sort=-v:refname | grep -v "^${CURRENT_TAG}$" | head -n 1)
          if [ -z "$PREV_TAG" ]; then
            echo "Initial release of browseros-server." > /tmp/release-notes.md
          else
            COMMITS=$(git log "$PREV_TAG"..HEAD --pretty=format:"%H" -- \
              "$SERVER_APP_PATH" \
              "$SERVER_BUILD_DIR" \
              "$SERVER_BUILD_ENTRY" \
              "$SERVER_RESOURCE_MANIFEST" \
              "$SERVER_WORKSPACE_PKG")
            if [ -z "$COMMITS" ]; then
              echo "No notable changes." > /tmp/release-notes.md
            else
              echo "## What's Changed" > /tmp/release-notes.md
              echo "" >> /tmp/release-notes.md
              while IFS= read -r SHA; do
                SUBJECT=$(git log -1 --pretty=format:"%s" "$SHA")
                PR_NUM=$(gh api "/repos/${{ github.repository }}/commits/${SHA}/pulls" --jq '.[0].number // empty' 2>/dev/null)
                if [ -n "$PR_NUM" ] && ! echo "$SUBJECT" | grep -qF "(#${PR_NUM})"; then
                  echo "- ${SUBJECT} (#${PR_NUM})" >> /tmp/release-notes.md
                else
                  echo "- ${SUBJECT}" >> /tmp/release-notes.md
                fi
              done <<< "$COMMITS"
            fi
          fi
      # Tags the validated sha and creates (or idempotently updates) the
      # GitHub release, attaching every produced zip. Each tag/push/release
      # action is skipped when it already happened, so re-runs are safe.
      - name: Create GitHub release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          PACKAGE_VERSION: ${{ steps.version.outputs.package_version }}
          RELEASE_SHA: ${{ steps.version.outputs.release_sha }}
        working-directory: ${{ github.workspace }}
        run: |
          TAG="browseros-server-v$PACKAGE_VERSION"
          TITLE="BrowserOS Server - v$PACKAGE_VERSION"
          mapfile -t ZIP_FILES < <(find packages/browseros-agent/dist/prod/server -maxdepth 1 -type f -name 'browseros-server-resources-*.zip' | sort)
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          if git rev-parse "$TAG" >/dev/null 2>&1; then
            echo "Tag $TAG already exists, skipping tag creation"
          else
            git tag -a "$TAG" -m "browseros-server v$PACKAGE_VERSION" "$RELEASE_SHA"
          fi
          if git ls-remote --tags origin "$TAG" | grep -q "$TAG"; then
            echo "Tag $TAG already on remote, skipping push"
          else
            git push origin "$TAG"
          fi
          if gh release view "$TAG" >/dev/null 2>&1; then
            echo "Release $TAG already exists, updating"
            gh release edit "$TAG" --title "$TITLE" --notes-file /tmp/release-notes.md
            gh release upload "$TAG" "${ZIP_FILES[@]}" --clobber
          else
            gh release create "$TAG" \
              --title "$TITLE" \
              --notes-file /tmp/release-notes.md \
              "${ZIP_FILES[@]}"
          fi

View File

@@ -8,7 +8,13 @@
import { afterAll, describe, it } from 'bun:test'
import assert from 'node:assert'
import { mkdtempSync, rmSync, writeFileSync } from 'node:fs'
import {
existsSync,
mkdtempSync,
readFileSync,
rmSync,
writeFileSync,
} from 'node:fs'
import { tmpdir } from 'node:os'
import { join, resolve } from 'node:path'
@@ -25,11 +31,14 @@ function getNativeTarget(): { id: string; ext: string } {
}
// Stub values so the build config validation passes without real secrets
const BUILD_ENV_STUBS: Record<string, string> = {
const INLINE_ENV_STUBS: Record<string, string> = {
BROWSEROS_CONFIG_URL: 'https://stub.test/config',
CODEGEN_SERVICE_URL: 'https://stub.test/codegen',
POSTHOG_API_KEY: 'phc_test_stub',
SENTRY_DSN: 'https://stub@sentry.test/0',
}
const R2_ENV_STUBS: Record<string, string> = {
R2_ACCOUNT_ID: 'test',
R2_ACCESS_KEY_ID: 'test',
R2_SECRET_ACCESS_KEY: 'test',
@@ -39,20 +48,51 @@ const BUILD_ENV_STUBS: Record<string, string> = {
describe('server build', () => {
const rootDir = resolve(import.meta.dir, '../../..')
const serverPkgPath = resolve(rootDir, 'apps/server/package.json')
const prodEnvPath = resolve(rootDir, 'apps/server/.env.production')
const prodEnvTemplatePath = resolve(
rootDir,
'apps/server/.env.production.example',
)
const buildScript = resolve(rootDir, 'scripts/build/server.ts')
const target = getNativeTarget()
const binaryPath = resolve(
rootDir,
`dist/prod/server/.tmp/binaries/browseros-server-${target.id}${target.ext}`,
)
const zipPath = resolve(
rootDir,
`dist/prod/server/browseros-server-resources-${target.id}.zip`,
)
const createdProdEnv = !existsSync(prodEnvPath)
// Empty manifest so the build skips R2 resource downloads
const tempDir = mkdtempSync(join(tmpdir(), 'browseros-build-test-'))
const emptyManifestPath = join(tempDir, 'empty-manifest.json')
writeFileSync(emptyManifestPath, JSON.stringify({ resources: [] }))
if (createdProdEnv) {
writeFileSync(prodEnvPath, readFileSync(prodEnvTemplatePath, 'utf-8'))
}
// Builds the child-process environment for a build invocation: the ambient
// environment overlaid with the inline stubs and any caller overrides, with
// the listed keys removed afterwards (so a test can prove a variable is
// genuinely absent, not just empty).
function buildEnv(
  extraEnv: Record<string, string>,
  omitKeys: string[] = [],
): NodeJS.ProcessEnv {
  const merged: NodeJS.ProcessEnv = Object.assign(
    {},
    process.env,
    INLINE_ENV_STUBS,
    extraEnv,
  )
  for (const droppedKey of omitKeys) {
    delete merged[droppedKey]
  }
  return merged
}
afterAll(() => {
rmSync(tempDir, { recursive: true, force: true })
if (createdProdEnv) {
rmSync(prodEnvPath, { force: true })
}
})
it('compiles and --version outputs correct version', async () => {
@@ -71,7 +111,7 @@ describe('server build', () => {
cwd: rootDir,
stdout: 'pipe',
stderr: 'pipe',
env: { ...process.env, ...BUILD_ENV_STUBS },
env: buildEnv(R2_ENV_STUBS),
},
)
const buildExit = await build.exited
@@ -97,4 +137,36 @@ describe('server build', () => {
)
assert.strictEqual(versionOutput.trim(), expectedVersion)
}, 300_000)
// Verifies that `--compile-only --archive-compiled` produces a local zip
// archive even when every R2_* credential is stripped from the environment.
it('archives compile-only builds without R2 config', async () => {
  // Remove any zip left by a previous run so the existence assertion
  // below cannot pass on stale output.
  rmSync(zipPath, { force: true })
  const build = Bun.spawn(
    [
      'bun',
      buildScript,
      `--target=${target.id}`,
      '--compile-only',
      '--archive-compiled',
    ],
    {
      cwd: rootDir,
      stdout: 'pipe',
      stderr: 'pipe',
      // Omit the R2 variables entirely to prove archiving needs no R2 config.
      env: buildEnv({}, [
        'R2_ACCOUNT_ID',
        'R2_ACCESS_KEY_ID',
        'R2_SECRET_ACCESS_KEY',
        'R2_BUCKET',
      ]),
    },
  )
  const buildExit = await build.exited
  if (buildExit !== 0) {
    // Surface the child's stderr so a failing build is diagnosable from CI logs.
    const stderr = await new Response(build.stderr).text()
    assert.fail(`Compile-only archive failed (exit ${buildExit}):\n${stderr}`)
  }
  assert.ok(existsSync(zipPath), `Expected archive at ${zipPath}`)
}, 300_000)
})

View File

@@ -19,7 +19,7 @@
"start:agent": "bun run --filter @browseros/agent dev",
"build": "bun run build:server && bun run build:agent",
"build:server": "FORCE_COLOR=1 bun scripts/build/server.ts --target=all",
"build:server:ci": "FORCE_COLOR=1 bun scripts/build/server.ts --target=all --compile-only",
"build:server:ci": "FORCE_COLOR=1 bun scripts/build/server.ts --target=all --compile-only --archive-compiled",
"build:server:test": "FORCE_COLOR=1 bun scripts/build/server.ts --target=darwin-arm64 --no-upload",
"upload:cli-installers": "bun scripts/build/cli.ts",
"start:server:test": "bun run build:server:test && set -a && . apps/server/.env.development && set +a && dist/prod/server/.tmp/binaries/browseros-server-darwin-arm64",

View File

@@ -37,29 +37,39 @@ export async function archiveAndUploadArtifacts(
r2: R2Config,
upload: boolean,
): Promise<UploadResult[]> {
const results: UploadResult[] = []
for (const artifact of artifacts) {
const zipPath = zipPathForArtifact(artifact)
await zipArtifactRoot(artifact.rootDir, zipPath)
const results = await archiveArtifacts(artifacts)
if (!upload) {
results.push({ targetId: artifact.target.id, zipPath })
continue
return results
}
const fileName = basename(zipPath)
const uploadedResults: UploadResult[] = []
for (const result of results) {
const fileName = basename(result.zipPath)
const latestR2Key = joinObjectKey(r2.uploadPrefix, 'latest', fileName)
const versionR2Key = joinObjectKey(r2.uploadPrefix, version, fileName)
await uploadFileToObject(client, r2, latestR2Key, zipPath)
await uploadFileToObject(client, r2, versionR2Key, zipPath)
results.push({
targetId: artifact.target.id,
zipPath,
await uploadFileToObject(client, r2, latestR2Key, result.zipPath)
await uploadFileToObject(client, r2, versionR2Key, result.zipPath)
uploadedResults.push({
targetId: result.targetId,
zipPath: result.zipPath,
latestR2Key,
versionR2Key,
})
}
return uploadedResults
}
/**
 * Zips each staged artifact root into its target-specific archive.
 *
 * Archives are produced sequentially, one per artifact, and the returned
 * results carry only local data (target id + zip path) — no R2 keys.
 */
export async function archiveArtifacts(
  artifacts: StagedArtifact[],
): Promise<UploadResult[]> {
  const archived: UploadResult[] = []
  for (const staged of artifacts) {
    const outputZip = zipPathForArtifact(staged)
    await zipArtifactRoot(staged.rootDir, outputZip)
    archived.push({
      targetId: staged.target.id,
      zipPath: outputZip,
    })
  }
  return archived
}

View File

@@ -25,20 +25,30 @@ export function parseBuildArgs(argv: string[]): BuildArgs {
'--compile-only',
'Compile binaries only (skip R2 staging and upload)',
)
.option(
'--archive-compiled',
'Archive compile-only binaries into local zip files without R2 resources',
)
program.parse(argv, { from: 'user' })
const options = program.opts<{
target: string
manifest: string
upload: boolean
compileOnly: boolean
archiveCompiled: boolean
}>()
const compileOnly = options.compileOnly ?? false
const archiveCompiled = options.archiveCompiled ?? false
if (archiveCompiled && !compileOnly) {
throw new Error('--archive-compiled requires --compile-only')
}
return {
targets: resolveTargets(options.target),
manifestPath: options.manifest,
upload: compileOnly ? false : (options.upload ?? true),
compileOnly,
archiveCompiled,
}
}

View File

@@ -2,13 +2,23 @@ import { existsSync } from 'node:fs'
import { resolve } from 'node:path'
import { log } from '../log'
import { archiveAndUploadArtifacts } from './archive'
import { archiveAndUploadArtifacts, archiveArtifacts } from './archive'
import { parseBuildArgs } from './cli'
import { compileServerBinaries, getDistProdRoot } from './compile'
import { loadBuildConfig } from './config'
import { getTargetRules, loadManifest } from './manifest'
import { createR2Client } from './r2'
import { stageTargetArtifact } from './stage'
import { stageCompiledArtifact, stageTargetArtifact } from './stage'
// Human-readable label for the selected build mode, used in the startup log.
// 'full' covers every non-compile-only invocation regardless of archive flag.
function buildModeLabel(argv: {
  compileOnly: boolean
  archiveCompiled: boolean
}): string {
  if (!argv.compileOnly) {
    return 'full'
  }
  return argv.archiveCompiled ? 'compile-only+archive' : 'compile-only'
}
export async function runProdResourceBuild(argv: string[]): Promise<void> {
const rootDir = resolve(import.meta.dir, '../../..')
@@ -22,7 +32,7 @@ export async function runProdResourceBuild(argv: string[]): Promise<void> {
log.header(`Building BrowserOS server artifacts v${buildConfig.version}`)
log.info(`Targets: ${args.targets.map((target) => target.id).join(', ')}`)
log.info(`Mode: ${args.compileOnly ? 'compile-only' : 'full'}`)
log.info(`Mode: ${buildModeLabel(args)}`)
const compiled = await compileServerBinaries(
args.targets,
@@ -32,6 +42,30 @@ export async function runProdResourceBuild(argv: string[]): Promise<void> {
)
if (args.compileOnly) {
if (args.archiveCompiled) {
const distRoot = getDistProdRoot()
const localArtifacts = []
for (const binary of compiled) {
log.step(`Packaging ${binary.target.name}`)
const staged = await stageCompiledArtifact(
distRoot,
binary.binaryPath,
binary.target,
buildConfig.version,
)
localArtifacts.push(staged)
log.success(`Packaged ${binary.target.id}`)
}
const archiveResults = await archiveArtifacts(localArtifacts)
log.done('Compile-only archive build completed')
for (const result of archiveResults) {
log.info(`${result.targetId}: ${result.zipPath}`)
}
return
}
log.done('Compile-only build completed')
for (const binary of compiled) {
log.info(`${binary.target.id}: ${binary.binaryPath}`)

View File

@@ -32,6 +32,36 @@ async function copyServerBinary(
}
}
/**
 * Prepares a clean artifact root for the target and copies the compiled
 * server binary into it. Returns the artifact root directory path.
 */
async function createArtifactRoot(
  distRoot: string,
  compiledBinaryPath: string,
  target: BuildTarget,
): Promise<string> {
  const stagingDir = artifactRoot(distRoot, target)
  // Recreate from scratch so stale files never leak into the artifact.
  await rm(stagingDir, { recursive: true, force: true })
  await mkdir(stagingDir, { recursive: true })
  const binaryDestination = serverDestinationPath(stagingDir, target)
  await copyServerBinary(compiledBinaryPath, binaryDestination, target)
  return stagingDir
}
/**
 * Writes the artifact metadata file and assembles the StagedArtifact record
 * describing the finished artifact root.
 */
async function finalizeArtifact(
  rootDir: string,
  target: BuildTarget,
  version: string,
): Promise<StagedArtifact> {
  const metadataPath = await writeArtifactMetadata(rootDir, target, version)
  const resourcesDir = join(rootDir, 'resources')
  return { target, rootDir, resourcesDir, metadataPath }
}
function resolveDestination(rootDir: string, destination: string): string {
const outputPath = join(rootDir, destination)
const relativePath = relative(rootDir, outputPath)
@@ -67,25 +97,21 @@ export async function stageTargetArtifact(
r2: R2Config,
version: string,
): Promise<StagedArtifact> {
const rootDir = artifactRoot(distRoot, target)
await rm(rootDir, { recursive: true, force: true })
await mkdir(rootDir, { recursive: true })
await copyServerBinary(
compiledBinaryPath,
serverDestinationPath(rootDir, target),
target,
)
const rootDir = await createArtifactRoot(distRoot, compiledBinaryPath, target)
for (const rule of rules) {
await stageRule(rootDir, rule, target, client, r2)
}
const metadataPath = await writeArtifactMetadata(rootDir, target, version)
return {
target,
rootDir,
resourcesDir: join(rootDir, 'resources'),
metadataPath,
}
return finalizeArtifact(rootDir, target, version)
}
/**
 * Stages a compiled binary into an artifact root without fetching any R2
 * resources — the compile-only path: binary + metadata only.
 */
export async function stageCompiledArtifact(
  distRoot: string,
  compiledBinaryPath: string,
  target: BuildTarget,
  version: string,
): Promise<StagedArtifact> {
  const artifactDir = await createArtifactRoot(
    distRoot,
    compiledBinaryPath,
    target,
  )
  return finalizeArtifact(artifactDir, target, version)
}

View File

@@ -22,6 +22,7 @@ export interface BuildArgs {
manifestPath: string
upload: boolean
compileOnly: boolean
archiveCompiled: boolean
}
export interface R2Config {