mirror of
https://github.com/browseros-ai/BrowserOS.git
synced 2026-05-13 15:46:22 +00:00
feat: remote skill download and auto-sync (#468)
* feat: add remote skill download and auto-sync Download default skills from remote catalog on first setup with bundled fallback when offline. Background sync every 45 minutes checks for new/updated skills without overwriting user-customized ones. Tracks installed defaults via content hashes in a local manifest file. * feat: make skills catalog URL configurable and add generation script Add SKILLS_CATALOG_URL env var (following CODEGEN_SERVICE_URL pattern) with fallback to the default constant. Add script to generate catalog.json from bundled defaults for static hosting. * feat: add R2 upload script and use cdn.browseros.com for catalog URL Add upload-skills-catalog.ts that generates and uploads catalog.json to Cloudflare R2 (same infra as existing build artifacts). Update default catalog URL to cdn.browseros.com/skills/v1/catalog.json. * test: add E2E tests for remote skill sync against live CDN * fix: address code review findings — security, validation, DRY - Add path traversal protection via safeSkillDir in writeSkillFile and readSkillContent (reuses existing validation from service.ts) - Add runtime type guards for catalog JSON and manifest JSON parsing - Fix seedFromRemote to return false on partial failure so bundled fallback kicks in - Add per-skill error handling in syncRemoteSkills so one bad skill doesn't crash the entire sync - Wire stopSkillSync into Application.stop() shutdown path - Extract version from frontmatter in seedFromBundled instead of hardcoding '1.0' - Consolidate duplicated logic: reuse installSkill/writeSkillFile/ contentHash/saveManifest from remote-sync.ts in seed.ts - Extract shared catalog generation into scripts/catalog-utils.ts * test: add flow tests for all four sync scenarios against live CDN * refactor: remove redundant scripts and inline catalog generation Drop generate-skills-catalog.ts, catalog-utils.ts, and e2e-remote-sync.test.ts (covered by flows.test.ts). Inline catalog generation into upload-skills-catalog.ts. 
* test: add full E2E server flow test against live CDN Tests all 7 steps of the real server lifecycle: fresh seed from CDN, no-op sync, user edit preservation, skill reinstall, custom skill protection, background timer firing, and second startup skip. * chore: remove e2e-server-flow test * fix: address Greptile review — entry validation, size limit, DRY, no-op saves - Validate individual skill entries in catalog (id, version, content must all be strings) not just the top-level shape - Add 1MB response size limit on catalog fetch to prevent resource exhaustion from compromised/misconfigured CDN - Skip manifest save when sync cycle had no changes (avoids unnecessary disk I/O every 45 minutes) - Share extractVersion via remote-sync.ts export, remove duplicate from seed.ts * fix: prevent bundled fallback from overwriting partial remote seeds When seedFromRemote partially fails, the bundled fallback now skips skills already in the manifest (installed by the partial remote seed). Also adds Content-Length early check before downloading the full catalog response body. * fix: run sync immediately on startup, not just on interval Previously the first sync fired 45 minutes after boot. Now startSkillSync runs one sync immediately so returning users get skill updates right away. * refactor: simplify sync — remote always wins, remove manifest Remote catalog is the source of truth. If a skill exists in the catalog, its version is compared against local frontmatter and overwritten when newer. No manifest file, no content hashes. User-created skills (IDs not in catalog) are never touched. * fix: skip bundled skills already installed by partial remote seed * chore: remove unreliable Content-Length check * chore: remove size limit checks, fetch timeout is sufficient
This commit is contained in:
@@ -19,6 +19,7 @@ export const INLINED_ENV = {
|
||||
CODEGEN_SERVICE_URL: process.env.CODEGEN_SERVICE_URL,
|
||||
POSTHOG_API_KEY: process.env.POSTHOG_API_KEY,
|
||||
BROWSEROS_CONFIG_URL: process.env.BROWSEROS_CONFIG_URL,
|
||||
SKILLS_CATALOG_URL: process.env.SKILLS_CATALOG_URL,
|
||||
} as const
|
||||
|
||||
export const REQUIRED_FOR_PRODUCTION = [
|
||||
|
||||
@@ -28,6 +28,7 @@ import { fetchDailyRateLimit } from './lib/rate-limiter/fetch-config'
|
||||
import { RateLimiter } from './lib/rate-limiter/rate-limiter'
|
||||
import { Sentry } from './lib/sentry'
|
||||
import { seedSoulTemplate } from './lib/soul'
|
||||
import { startSkillSync, stopSkillSync } from './skills/remote-sync'
|
||||
import { seedDefaultSkills } from './skills/seed'
|
||||
import { registry } from './tools/registry'
|
||||
import { VERSION } from './version'
|
||||
@@ -112,12 +113,14 @@ export class Application {
|
||||
)
|
||||
|
||||
this.logStartupSummary(controllerServerStarted)
|
||||
startSkillSync()
|
||||
|
||||
metrics.log('http_server.started', { version: VERSION })
|
||||
}
|
||||
|
||||
stop(reason?: string): void {
|
||||
logger.info('Shutting down server...', { reason })
|
||||
stopSkillSync()
|
||||
|
||||
// Immediate exit without graceful shutdown. Chromium may kill us on update/restart,
|
||||
// and we need to free the port instantly so the HTTP port doesn't keep switching.
|
||||
|
||||
173
packages/browseros-agent/apps/server/src/skills/remote-sync.ts
Normal file
173
packages/browseros-agent/apps/server/src/skills/remote-sync.ts
Normal file
@@ -0,0 +1,173 @@
|
||||
import { mkdir, readFile, writeFile } from 'node:fs/promises'
|
||||
import { join } from 'node:path'
|
||||
import { TIMEOUTS } from '@browseros/shared/constants/timeouts'
|
||||
import { EXTERNAL_URLS } from '@browseros/shared/constants/urls'
|
||||
import { INLINED_ENV } from '../env'
|
||||
import { getSkillsDir } from '../lib/browseros-dir'
|
||||
import { logger } from '../lib/logger'
|
||||
import { safeSkillDir } from './service'
|
||||
import type { RemoteSkillCatalog, RemoteSkillEntry } from './types'
|
||||
|
||||
// Handle for the periodic background sync; null whenever sync is not running.
let syncTimer: ReturnType<typeof setInterval> | null = null
||||
export function extractVersion(content: string): string {
|
||||
const match = content.match(/^\s*version:\s*["']?([^"'\n]+)["']?/m)
|
||||
return match?.[1]?.trim() || '1.0'
|
||||
}
|
||||
|
||||
function isValidSkillEntry(entry: unknown): entry is RemoteSkillEntry {
|
||||
if (typeof entry !== 'object' || entry === null) return false
|
||||
const e = entry as Record<string, unknown>
|
||||
return (
|
||||
typeof e.id === 'string' &&
|
||||
typeof e.version === 'string' &&
|
||||
typeof e.content === 'string'
|
||||
)
|
||||
}
|
||||
|
||||
function isValidCatalog(data: unknown): data is RemoteSkillCatalog {
|
||||
if (typeof data !== 'object' || data === null) return false
|
||||
const d = data as Record<string, unknown>
|
||||
return (
|
||||
typeof d.version === 'number' &&
|
||||
Array.isArray(d.skills) &&
|
||||
d.skills.every(isValidSkillEntry)
|
||||
)
|
||||
}
|
||||
|
||||
function getCatalogUrl(): string {
|
||||
return INLINED_ENV.SKILLS_CATALOG_URL || EXTERNAL_URLS.SKILLS_CATALOG
|
||||
}
|
||||
|
||||
export async function fetchRemoteCatalog(): Promise<RemoteSkillCatalog | null> {
|
||||
try {
|
||||
const response = await fetch(getCatalogUrl(), {
|
||||
signal: AbortSignal.timeout(TIMEOUTS.SKILLS_FETCH),
|
||||
})
|
||||
if (!response.ok) {
|
||||
logger.warn('Failed to fetch remote skill catalog', {
|
||||
status: response.status,
|
||||
})
|
||||
return null
|
||||
}
|
||||
const data: unknown = await response.json()
|
||||
if (!isValidCatalog(data)) {
|
||||
logger.warn('Remote skill catalog has invalid format')
|
||||
return null
|
||||
}
|
||||
return data
|
||||
} catch (err) {
|
||||
logger.debug('Remote skill catalog unavailable', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
async function getLocalVersion(skillId: string): Promise<string | null> {
|
||||
try {
|
||||
const safeDir = safeSkillDir(skillId)
|
||||
const content = await readFile(join(safeDir, 'SKILL.md'), 'utf-8')
|
||||
return extractVersion(content)
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
export async function writeSkillFile(
|
||||
skillId: string,
|
||||
content: string,
|
||||
): Promise<void> {
|
||||
const safeDir = safeSkillDir(skillId)
|
||||
await mkdir(safeDir, { recursive: true })
|
||||
await writeFile(join(safeDir, 'SKILL.md'), content)
|
||||
}
|
||||
|
||||
export async function syncRemoteSkills(): Promise<{
|
||||
installed: number
|
||||
updated: number
|
||||
}> {
|
||||
const result = { installed: 0, updated: 0 }
|
||||
const catalog = await fetchRemoteCatalog()
|
||||
if (!catalog) return result
|
||||
|
||||
for (const remoteSkill of catalog.skills) {
|
||||
try {
|
||||
const localVersion = await getLocalVersion(remoteSkill.id)
|
||||
|
||||
if (!localVersion) {
|
||||
await writeSkillFile(remoteSkill.id, remoteSkill.content)
|
||||
result.installed++
|
||||
continue
|
||||
}
|
||||
|
||||
if (localVersion === remoteSkill.version) {
|
||||
continue
|
||||
}
|
||||
|
||||
await writeSkillFile(remoteSkill.id, remoteSkill.content)
|
||||
result.updated++
|
||||
} catch (err) {
|
||||
logger.warn('Failed to sync skill', {
|
||||
id: remoteSkill.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
export async function seedFromRemote(): Promise<boolean> {
|
||||
const catalog = await fetchRemoteCatalog()
|
||||
if (!catalog || catalog.skills.length === 0) return false
|
||||
|
||||
let seeded = 0
|
||||
|
||||
for (const skill of catalog.skills) {
|
||||
try {
|
||||
await writeSkillFile(skill.id, skill.content)
|
||||
seeded++
|
||||
} catch (err) {
|
||||
logger.warn('Failed to seed remote skill', {
|
||||
id: skill.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (seeded > 0) {
|
||||
logger.info(`Seeded ${seeded}/${catalog.skills.length} skills from remote catalog`)
|
||||
}
|
||||
|
||||
return seeded === catalog.skills.length
|
||||
}
|
||||
|
||||
async function runSync(): Promise<void> {
|
||||
try {
|
||||
const { installed, updated } = await syncRemoteSkills()
|
||||
if (installed > 0 || updated > 0) {
|
||||
logger.info('Remote skill sync completed', { installed, updated })
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn('Skill sync failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export function startSkillSync(): void {
|
||||
if (syncTimer) return
|
||||
|
||||
runSync()
|
||||
|
||||
syncTimer = setInterval(runSync, TIMEOUTS.SKILLS_SYNC_INTERVAL)
|
||||
syncTimer.unref()
|
||||
}
|
||||
|
||||
export function stopSkillSync(): void {
|
||||
if (syncTimer) {
|
||||
clearInterval(syncTimer)
|
||||
syncTimer = null
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,9 @@
|
||||
import { mkdir, readdir, writeFile } from 'node:fs/promises'
|
||||
import { readdir, stat } from 'node:fs/promises'
|
||||
import { join } from 'node:path'
|
||||
import { getSkillsDir } from '../lib/browseros-dir'
|
||||
import { logger } from '../lib/logger'
|
||||
import { DEFAULT_SKILLS } from './defaults'
|
||||
import { seedFromRemote, writeSkillFile } from './remote-sync'
|
||||
|
||||
async function hasExistingSkills(skillsDir: string): Promise<boolean> {
|
||||
try {
|
||||
@@ -13,16 +14,27 @@ async function hasExistingSkills(skillsDir: string): Promise<boolean> {
|
||||
}
|
||||
}
|
||||
|
||||
async function skillExists(skillsDir: string, id: string): Promise<boolean> {
|
||||
try {
|
||||
await stat(join(skillsDir, id, 'SKILL.md'))
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export async function seedDefaultSkills(): Promise<void> {
|
||||
const skillsDir = getSkillsDir()
|
||||
if (await hasExistingSkills(skillsDir)) return
|
||||
|
||||
const remoteSucceeded = await seedFromRemote()
|
||||
if (remoteSucceeded) return
|
||||
|
||||
let seeded = 0
|
||||
for (const skill of DEFAULT_SKILLS) {
|
||||
if (await skillExists(skillsDir, skill.id)) continue
|
||||
try {
|
||||
const targetDir = join(skillsDir, skill.id)
|
||||
await mkdir(targetDir, { recursive: true })
|
||||
await writeFile(join(targetDir, 'SKILL.md'), skill.content)
|
||||
await writeSkillFile(skill.id, skill.content)
|
||||
seeded++
|
||||
} catch (err) {
|
||||
logger.warn('Failed to seed skill', {
|
||||
@@ -33,6 +45,6 @@ export async function seedDefaultSkills(): Promise<void> {
|
||||
}
|
||||
|
||||
if (seeded > 0) {
|
||||
logger.info(`Seeded ${seeded} default skills`)
|
||||
logger.info(`Seeded ${seeded} default skills (bundled)`)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,8 +19,7 @@ export function slugify(name: string): string {
|
||||
.replace(/^-|-$/g, '')
|
||||
}
|
||||
|
||||
// Prevents path traversal — ensures resolved path stays inside skills directory
|
||||
function safeSkillDir(id: string): string {
|
||||
export function safeSkillDir(id: string): string {
|
||||
const skillsDir = getSkillsDir()
|
||||
const resolved = resolve(skillsDir, id)
|
||||
if (!resolved.startsWith(`${skillsDir}${sep}`)) {
|
||||
|
||||
@@ -38,3 +38,15 @@ export type CreateSkillInput = {
|
||||
export type UpdateSkillInput = Partial<CreateSkillInput> & {
|
||||
enabled?: boolean
|
||||
}
|
||||
|
||||
/** One downloadable skill as published in the remote catalog. */
export type RemoteSkillEntry = {
  id: string       // directory name under the skills dir; also the catalog key
  version: string  // frontmatter version, compared against the local copy
  content: string  // full SKILL.md contents
}

/** Shape of the catalog.json payload fetched from the CDN. */
export type RemoteSkillCatalog = {
  version: number  // catalog schema version
  skills: RemoteSkillEntry[]
}
|
||||
|
||||
|
||||
@@ -0,0 +1,90 @@
|
||||
/**
 * E2E flow tests against live CDN.
 */

import { afterAll, beforeAll, describe, it, mock } from 'bun:test'
import assert from 'node:assert'
import { mkdir, readdir, readFile, rm, writeFile } from 'node:fs/promises'
import { tmpdir } from 'node:os'
import { join } from 'node:path'

// Temp skills directory; created in beforeAll, removed in afterAll.
let testDir: string

// Redirect the skills directory to the temp dir before importing the module under test.
mock.module('../../src/lib/browseros-dir', () => ({
  getSkillsDir: () => testDir,
}))

// Pin the catalog URL so the test always hits the production CDN endpoint.
mock.module('../../src/env', () => ({
  INLINED_ENV: {
    SKILLS_CATALOG_URL: 'https://cdn.browseros.com/skills/v1/catalog.json',
  },
}))

// Dynamic import so the mocks above are registered first.
const { seedFromRemote, syncRemoteSkills } =
  await import('../../src/skills/remote-sync')

// Lists skill directory names in the temp dir, ignoring dotfiles, sorted.
async function listSkills(): Promise<string[]> {
  const entries = await readdir(testDir)
  return entries.filter((e) => !e.startsWith('.')).sort()
}

beforeAll(async () => {
  testDir = join(tmpdir(), `flow-test-${Date.now()}`)
  await mkdir(testDir, { recursive: true })
})

afterAll(async () => {
  await rm(testDir, { recursive: true, force: true })
})
||||
|
||||
describe('Flow tests against live CDN', () => {
|
||||
it('seeds all skills from CDN on fresh install', async () => {
|
||||
const result = await seedFromRemote()
|
||||
assert.strictEqual(result, true)
|
||||
const skills = await listSkills()
|
||||
assert.strictEqual(skills.length, 12)
|
||||
})
|
||||
|
||||
it('sync does nothing when already up to date', async () => {
|
||||
const result = await syncRemoteSkills()
|
||||
assert.strictEqual(result.installed, 0)
|
||||
assert.strictEqual(result.updated, 0)
|
||||
})
|
||||
|
||||
it('remote overwrites local edits when version differs', async () => {
|
||||
const skillPath = join(testDir, 'summarize-page', 'SKILL.md')
|
||||
const original = await readFile(skillPath, 'utf-8')
|
||||
|
||||
// User edits the file AND we fake a version mismatch
|
||||
const edited = original.replace(/version: "1.0"/, 'version: "0.9"') + '\n## My Notes\n'
|
||||
await writeFile(skillPath, edited)
|
||||
|
||||
const result = await syncRemoteSkills()
|
||||
assert.strictEqual(result.updated >= 1, true)
|
||||
|
||||
const afterSync = await readFile(skillPath, 'utf-8')
|
||||
assert.ok(!afterSync.includes('My Notes'))
|
||||
})
|
||||
|
||||
it('installs skill deleted locally', async () => {
|
||||
await rm(join(testDir, 'save-page'), { recursive: true })
|
||||
|
||||
const result = await syncRemoteSkills()
|
||||
assert.strictEqual(result.installed, 1)
|
||||
|
||||
const content = await readFile(join(testDir, 'save-page', 'SKILL.md'), 'utf-8')
|
||||
assert.ok(content.includes('name: save-page'))
|
||||
})
|
||||
|
||||
it('user-created skill is never touched', async () => {
|
||||
const customDir = join(testDir, 'my-workflow')
|
||||
await mkdir(customDir, { recursive: true })
|
||||
const custom = '---\nname: my-workflow\ndescription: custom\n---\n# Mine\n'
|
||||
await writeFile(join(customDir, 'SKILL.md'), custom)
|
||||
|
||||
await syncRemoteSkills()
|
||||
|
||||
const afterSync = await readFile(join(customDir, 'SKILL.md'), 'utf-8')
|
||||
assert.strictEqual(afterSync, custom)
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,247 @@
|
||||
import { afterEach, beforeEach, describe, it, mock, spyOn } from 'bun:test'
import assert from 'node:assert'
import { mkdtemp, readFile, rm, writeFile, mkdir } from 'node:fs/promises'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
import type { RemoteSkillCatalog } from '../../src/skills/types'

// Fresh temp skills directory per test (see beforeEach/afterEach below).
let testDir: string

const mockGetSkillsDir = mock(() => testDir)

// Redirect the skills directory before the module under test is imported.
mock.module('../../src/lib/browseros-dir', () => ({
  getSkillsDir: mockGetSkillsDir,
}))

const { fetchRemoteCatalog, syncRemoteSkills, seedFromRemote } =
  await import('../../src/skills/remote-sync')

// Builds a catalog payload with schema version 1 around the given entries.
function makeCatalog(
  skills: { id: string; version: string; content: string }[],
): RemoteSkillCatalog {
  return { version: 1, skills }
}

// Fixture: a skill at frontmatter version "1.0".
const SKILL_V1 = `---
name: test-skill
description: A test skill
metadata:
  display-name: Test Skill
  enabled: "true"
  version: "1.0"
---

# Test Skill

Do the thing.
`

// Fixture: the same skill bumped to version "2.0" with updated body text.
const SKILL_V2 = `---
name: test-skill
description: A test skill (updated)
metadata:
  display-name: Test Skill
  enabled: "true"
  version: "2.0"
---

# Test Skill v2

Do the thing better.
`

beforeEach(async () => {
  testDir = await mkdtemp(join(tmpdir(), 'skill-sync-'))
})

afterEach(async () => {
  await rm(testDir, { recursive: true, force: true })
  mock.restore()
})
|
||||
|
||||
// Catalog fetching: every failure mode (network error, HTTP error, invalid
// payload) must resolve to null rather than throw.
describe('fetchRemoteCatalog', () => {
  it('returns null on network failure', async () => {
    const spy = spyOn(globalThis, 'fetch').mockRejectedValue(new Error('offline'))
    assert.strictEqual(await fetchRemoteCatalog(), null)
    spy.mockRestore()
  })

  it('returns null on non-ok response', async () => {
    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response('Not Found', { status: 404 }),
    )
    assert.strictEqual(await fetchRemoteCatalog(), null)
    spy.mockRestore()
  })

  it('returns catalog on success', async () => {
    const catalog = makeCatalog([{ id: 'test', version: '1.0', content: 'hello' }])
    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(catalog), { status: 200 }),
    )
    assert.deepStrictEqual(await fetchRemoteCatalog(), catalog)
    spy.mockRestore()
  })

  it('returns null for invalid catalog shape', async () => {
    // Top-level shape is wrong: skills must be an array.
    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify({ skills: 'not-an-array' }), { status: 200 }),
    )
    assert.strictEqual(await fetchRemoteCatalog(), null)
    spy.mockRestore()
  })

  it('returns null when skill entries have invalid shape', async () => {
    // Top-level shape is fine, but the entry fields have wrong types.
    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(
        JSON.stringify({ version: 1, skills: [{ id: 123, version: '1.0', content: null }] }),
        { status: 200 },
      ),
    )
    assert.strictEqual(await fetchRemoteCatalog(), null)
    spy.mockRestore()
  })
})
|
||||
|
||||
// Sync pass semantics, driven entirely through a mocked global fetch.
describe('syncRemoteSkills', () => {
  it('returns zeros when remote is unavailable', async () => {
    const spy = spyOn(globalThis, 'fetch').mockRejectedValue(new Error('offline'))
    const result = await syncRemoteSkills()
    assert.deepStrictEqual(result, { installed: 0, updated: 0 })
    spy.mockRestore()
  })

  it('installs new skills that do not exist locally', async () => {
    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(makeCatalog([
        { id: 'new-skill', version: '1.0', content: SKILL_V1 },
      ])), { status: 200 }),
    )
    const result = await syncRemoteSkills()
    assert.strictEqual(result.installed, 1)

    const content = await readFile(join(testDir, 'new-skill', 'SKILL.md'), 'utf-8')
    assert.strictEqual(content, SKILL_V1)
    spy.mockRestore()
  })

  it('updates skill when remote has newer version', async () => {
    // Pre-install v1 locally, then serve v2 from the catalog.
    await mkdir(join(testDir, 'test-skill'), { recursive: true })
    await writeFile(join(testDir, 'test-skill', 'SKILL.md'), SKILL_V1)

    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(makeCatalog([
        { id: 'test-skill', version: '2.0', content: SKILL_V2 },
      ])), { status: 200 }),
    )
    const result = await syncRemoteSkills()
    assert.strictEqual(result.updated, 1)

    const content = await readFile(join(testDir, 'test-skill', 'SKILL.md'), 'utf-8')
    assert.strictEqual(content, SKILL_V2)
    spy.mockRestore()
  })

  it('overwrites user-edited skill when remote has newer version', async () => {
    // Local copy has user-appended notes; remote-wins policy drops them.
    await mkdir(join(testDir, 'test-skill'), { recursive: true })
    await writeFile(join(testDir, 'test-skill', 'SKILL.md'), SKILL_V1 + '\n## My Notes\n')

    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(makeCatalog([
        { id: 'test-skill', version: '2.0', content: SKILL_V2 },
      ])), { status: 200 }),
    )
    const result = await syncRemoteSkills()
    assert.strictEqual(result.updated, 1)

    const content = await readFile(join(testDir, 'test-skill', 'SKILL.md'), 'utf-8')
    assert.strictEqual(content, SKILL_V2)
    assert.ok(!content.includes('My Notes'))
    spy.mockRestore()
  })

  it('skips when version matches', async () => {
    await mkdir(join(testDir, 'test-skill'), { recursive: true })
    await writeFile(join(testDir, 'test-skill', 'SKILL.md'), SKILL_V1)

    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(makeCatalog([
        { id: 'test-skill', version: '1.0', content: SKILL_V1 },
      ])), { status: 200 }),
    )
    const result = await syncRemoteSkills()
    assert.strictEqual(result.installed, 0)
    assert.strictEqual(result.updated, 0)
    spy.mockRestore()
  })

  it('does not touch user-created skills not in catalog', async () => {
    await mkdir(join(testDir, 'my-custom'), { recursive: true })
    const custom = '---\nname: my-custom\ndescription: mine\nmetadata:\n version: "1.0"\n---\n# Mine\n'
    await writeFile(join(testDir, 'my-custom', 'SKILL.md'), custom)

    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(makeCatalog([
        { id: 'other-skill', version: '1.0', content: SKILL_V1 },
      ])), { status: 200 }),
    )
    await syncRemoteSkills()

    // Ids absent from the catalog must remain byte-identical after a sync.
    const content = await readFile(join(testDir, 'my-custom', 'SKILL.md'), 'utf-8')
    assert.strictEqual(content, custom)
    spy.mockRestore()
  })

  it('rejects path traversal in skill ids', async () => {
    // safeSkillDir should throw for this id, so nothing gets installed.
    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(makeCatalog([
        { id: '../../etc/evil', version: '1.0', content: SKILL_V1 },
      ])), { status: 200 }),
    )
    const result = await syncRemoteSkills()
    assert.strictEqual(result.installed, 0)
    spy.mockRestore()
  })
})
|
||||
|
||||
// First-run seeding: returns true only on a complete, non-empty seed.
describe('seedFromRemote', () => {
  it('returns false when remote is unavailable', async () => {
    const spy = spyOn(globalThis, 'fetch').mockRejectedValue(new Error('offline'))
    assert.strictEqual(await seedFromRemote(), false)
    spy.mockRestore()
  })

  it('seeds all skills from remote', async () => {
    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(makeCatalog([
        { id: 'skill-a', version: '1.0', content: SKILL_V1 },
        { id: 'skill-b', version: '1.0', content: SKILL_V2 },
      ])), { status: 200 }),
    )
    assert.strictEqual(await seedFromRemote(), true)

    const content = await readFile(join(testDir, 'skill-a', 'SKILL.md'), 'utf-8')
    assert.strictEqual(content, SKILL_V1)
    spy.mockRestore()
  })

  it('returns false for empty catalog', async () => {
    // An empty catalog must not count as a successful seed, so the
    // bundled-defaults fallback still runs.
    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(makeCatalog([])), { status: 200 }),
    )
    assert.strictEqual(await seedFromRemote(), false)
    spy.mockRestore()
  })

  it('returns false on partial failure', async () => {
    // One good entry, one traversal id that fails to write — partial success
    // must still report false so the caller can fall back.
    const spy = spyOn(globalThis, 'fetch').mockResolvedValue(
      new Response(JSON.stringify(makeCatalog([
        { id: 'good-skill', version: '1.0', content: SKILL_V1 },
        { id: '../../traversal', version: '1.0', content: 'evil' },
      ])), { status: 200 }),
    )
    assert.strictEqual(await seedFromRemote(), false)
    spy.mockRestore()
  })
})
|
||||
@@ -32,6 +32,8 @@ export const TIMEOUTS = {
|
||||
|
||||
// External API calls
|
||||
KLAVIS_FETCH: 30_000,
|
||||
SKILLS_FETCH: 15_000,
|
||||
SKILLS_SYNC_INTERVAL: 45 * 60_000,
|
||||
|
||||
// Navigation/DOM
|
||||
NAVIGATION: 10_000,
|
||||
|
||||
@@ -10,4 +10,5 @@ export const EXTERNAL_URLS = {
|
||||
KLAVIS_PROXY: 'https://llm.browseros.com/klavis',
|
||||
POSTHOG_DEFAULT: 'https://us.i.posthog.com',
|
||||
CODEGEN_SERVICE: 'https://graph.browseros.com',
|
||||
SKILLS_CATALOG: 'https://cdn.browseros.com/skills/v1/catalog.json',
|
||||
} as const
|
||||
|
||||
71
packages/browseros-agent/scripts/upload-skills-catalog.ts
Normal file
71
packages/browseros-agent/scripts/upload-skills-catalog.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { readdir, readFile, stat } from 'node:fs/promises'
|
||||
import { join } from 'node:path'
|
||||
import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3'
|
||||
import type { RemoteSkillCatalog, RemoteSkillEntry } from '../apps/server/src/skills/types'
|
||||
|
||||
// Bundled default skills shipped with the server build.
const DEFAULTS_DIR = join(import.meta.dir, '../apps/server/src/skills/defaults')
// Object key under which the catalog is published in the R2 bucket.
const R2_KEY = 'skills/v1/catalog.json'
|
||||
|
||||
function extractVersion(content: string): string {
|
||||
const match = content.match(/^\s*version:\s*["']?([^"'\n]+)["']?/m)
|
||||
return match?.[1]?.trim() || '1.0'
|
||||
}
|
||||
|
||||
async function generateCatalog(): Promise<RemoteSkillCatalog> {
|
||||
const entries = await readdir(DEFAULTS_DIR)
|
||||
const skills: RemoteSkillEntry[] = []
|
||||
|
||||
for (const entry of entries) {
|
||||
const entryPath = join(DEFAULTS_DIR, entry)
|
||||
const info = await stat(entryPath)
|
||||
if (!info.isDirectory()) continue
|
||||
|
||||
const skillPath = join(entryPath, 'SKILL.md')
|
||||
try {
|
||||
const content = await readFile(skillPath, 'utf-8')
|
||||
skills.push({ id: entry, version: extractVersion(content), content })
|
||||
} catch {
|
||||
console.error(`Skipping ${entry}: no SKILL.md found`)
|
||||
}
|
||||
}
|
||||
|
||||
skills.sort((a, b) => a.id.localeCompare(b.id))
|
||||
return { version: 1, skills }
|
||||
}
|
||||
|
||||
function requireEnv(name: string): string {
|
||||
const value = process.env[name]
|
||||
if (!value) {
|
||||
console.error(`Missing required env var: ${name}`)
|
||||
process.exit(1)
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
// Credentials and target bucket come from the environment (CI secrets).
const accountId = requireEnv('R2_ACCOUNT_ID')
const accessKeyId = requireEnv('R2_ACCESS_KEY_ID')
const secretAccessKey = requireEnv('R2_SECRET_ACCESS_KEY')
const bucket = requireEnv('R2_BUCKET')

// Cloudflare R2 exposes an S3-compatible endpoint, so the AWS SDK client works as-is.
const client = new S3Client({
  region: 'auto',
  endpoint: `https://${accountId}.r2.cloudflarestorage.com`,
  credentials: { accessKeyId, secretAccessKey },
})

const catalog = await generateCatalog()
const body = JSON.stringify(catalog, null, 2)

console.log(`Generated catalog with ${catalog.skills.length} skills`)

// max-age=300: clients may serve a cached catalog for up to 5 minutes after an upload.
await client.send(
  new PutObjectCommand({
    Bucket: bucket,
    Key: R2_KEY,
    Body: body,
    ContentType: 'application/json',
    CacheControl: 'public, max-age=300',
  }),
)

console.log(`Uploaded to R2: ${bucket}/${R2_KEY}`)
|
||||
Reference in New Issue
Block a user