diff --git a/.agents/skills/openclaw-testing/SKILL.md b/.agents/skills/openclaw-testing/SKILL.md index a40d18d1994..98b069744ab 100644 --- a/.agents/skills/openclaw-testing/SKILL.md +++ b/.agents/skills/openclaw-testing/SKILL.md @@ -555,6 +555,13 @@ top-level phase timings for preflight, image build, package prep, lane pools, and cleanup. Use `pnpm test:docker:timings ` to rank slow lanes and phases before deciding whether a broader rerun is justified. +Skill install proof: use `pnpm test:docker:skill-install` or targeted +`docker_lanes=skill-install` for live ClawHub skill-install validation. The +lane installs the package tarball in a bare runner, keeps +`skills.install.allowUploadedArchives=false`, resolves the current live slug +from `openclaw skills search`, installs it, and verifies `.clawhub` origin/lock +metadata. Prefer this checked-in script over inline heredoc Testbox recipes. + ## Cheap Docker Reruns First derive the smallest rerun command from artifacts: diff --git a/.github/workflows/openclaw-release-checks.yml b/.github/workflows/openclaw-release-checks.yml index f8d7938df67..6feb007a4b1 100644 --- a/.github/workflows/openclaw-release-checks.yml +++ b/.github/workflows/openclaw-release-checks.yml @@ -595,7 +595,7 @@ jobs: artifact_name: ${{ needs.prepare_release_package.outputs.artifact_name }} package_sha256: ${{ needs.resolve_target.outputs.package_acceptance_package_spec == '' && needs.prepare_release_package.outputs.package_sha256 || '' }} suite_profile: custom - docker_lanes: doctor-switch update-channel-switch update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins-offline plugin-update + docker_lanes: doctor-switch update-channel-switch skill-install update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins-offline plugin-update published_upgrade_survivor_baselines: ${{ needs.resolve_target.outputs.run_release_soak == 'true' && 'last-stable-4 2026.4.23 2026.5.2 
2026.4.15' || '' }} published_upgrade_survivor_scenarios: ${{ needs.resolve_target.outputs.run_release_soak == 'true' && 'reported-issues' || '' }} telegram_mode: mock-openai diff --git a/.github/workflows/package-acceptance.yml b/.github/workflows/package-acceptance.yml index c54e83461dc..45a9288f69f 100644 --- a/.github/workflows/package-acceptance.yml +++ b/.github/workflows/package-acceptance.yml @@ -386,10 +386,10 @@ jobs: docker_lanes="npm-onboard-channel-agent gateway-network config-reload" ;; package) - docker_lanes="npm-onboard-channel-agent doctor-switch update-channel-switch update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins-offline plugin-update" + docker_lanes="npm-onboard-channel-agent doctor-switch update-channel-switch skill-install update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins-offline plugin-update" ;; product) - docker_lanes="npm-onboard-channel-agent doctor-switch update-channel-switch update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins plugin-update mcp-channels cron-mcp-cleanup openai-web-search-minimal openwebui" + docker_lanes="npm-onboard-channel-agent doctor-switch update-channel-switch skill-install update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins plugin-update mcp-channels cron-mcp-cleanup openai-web-search-minimal openwebui" include_openwebui=true ;; full) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1886d2af067..4d85afa2f8a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ Docs: https://docs.openclaw.ai ### Changes +- Gateway/skills: add an opt-in private skill archive upload install path gated by `skills.install.allowUploadedArchives`, so trusted Gateway clients can stage and install zip-backed skills only when operators explicitly enable the code-install surface. (#74430) Thanks @samzong. 
- Agents/compaction: preserve scoped background exec/process session references across embedded compaction and after-turn runtime contexts without exposing sessions from unrelated scopes. Fixes #79284. (#79307) Thanks @TurboTheTurtle. - CLI/onboarding: improve setup, onboarding, configure, and channel command wayfinding so terminal flows explain the next useful command instead of relying on terse setup labels. diff --git a/apps/shared/OpenClawKit/Sources/OpenClawProtocol/GatewayModels.swift b/apps/shared/OpenClawKit/Sources/OpenClawProtocol/GatewayModels.swift index 41edc17ebe2..ad1ec5bcf98 100644 --- a/apps/shared/OpenClawKit/Sources/OpenClawProtocol/GatewayModels.swift +++ b/apps/shared/OpenClawKit/Sources/OpenClawProtocol/GatewayModels.swift @@ -4878,6 +4878,80 @@ public struct SkillsDetailResult: Codable, Sendable { } } +public struct SkillsUploadBeginParams: Codable, Sendable { + public let kind: String + public let slug: String + public let sizebytes: Int + public let sha256: String? + public let force: Bool? + public let idempotencykey: String? + + public init( + kind: String, + slug: String, + sizebytes: Int, + sha256: String?, + force: Bool?, + idempotencykey: String?) 
+ { + self.kind = kind + self.slug = slug + self.sizebytes = sizebytes + self.sha256 = sha256 + self.force = force + self.idempotencykey = idempotencykey + } + + private enum CodingKeys: String, CodingKey { + case kind + case slug + case sizebytes = "sizeBytes" + case sha256 + case force + case idempotencykey = "idempotencyKey" + } +} + +public struct SkillsUploadChunkParams: Codable, Sendable { + public let uploadid: String + public let offset: Int + public let database64: String + + public init( + uploadid: String, + offset: Int, + database64: String) + { + self.uploadid = uploadid + self.offset = offset + self.database64 = database64 + } + + private enum CodingKeys: String, CodingKey { + case uploadid = "uploadId" + case offset + case database64 = "dataBase64" + } +} + +public struct SkillsUploadCommitParams: Codable, Sendable { + public let uploadid: String + public let sha256: String? + + public init( + uploadid: String, + sha256: String?) + { + self.uploadid = uploadid + self.sha256 = sha256 + } + + private enum CodingKeys: String, CodingKey { + case uploadid = "uploadId" + case sha256 + } +} + public struct CronJob: Codable, Sendable { public let id: String public let agentid: String? 
diff --git a/docs/.generated/config-baseline.sha256 b/docs/.generated/config-baseline.sha256 index ca2284baab5..e416e8b4f3e 100644 --- a/docs/.generated/config-baseline.sha256 +++ b/docs/.generated/config-baseline.sha256 @@ -1,4 +1,4 @@ -335083781741da50b280496b954794bdecba7c1150ce777d37534ccc1ec2c10a config-baseline.json -b629f3b6ec6389eb0709e6f9149d7c3ab50431bb22124019541710873dc52cbb config-baseline.core.json -9edc62ae7dfedabc645470dd03102b813fc780b9108caf675fd661104714206f config-baseline.channel.json -1da42cb10427fb08510f29732493d24851ab915a424f91556569febdd450d9c3 config-baseline.plugin.json +6a0d49baf0a07563146d9c3fa7650175f4b3e5bf8ed67a8f8a1930fd8dc18f3b config-baseline.json +b58f16663413d37906072f3ed7fa7c0ed0bac6347ab76627180ea4f21331b9c5 config-baseline.core.json +25c6e70d5b4925e07549072159ce4fcad45813fed12fa36a2f43d3568ca8dd96 config-baseline.channel.json +af8a8e8616a0146ad989ff1bc0e8cf62c61a4d434dd67bbe7fe082c5c204fada config-baseline.plugin.json diff --git a/docs/ci.md b/docs/ci.md index d434f5f32ae..18b2e49dd19 100644 --- a/docs/ci.md +++ b/docs/ci.md @@ -258,7 +258,7 @@ Keep `workflow_ref` and `package_ref` separate. 
`workflow_ref` is the trusted wo ### Suite profiles - `smoke` — `npm-onboard-channel-agent`, `gateway-network`, `config-reload` -- `package` — `npm-onboard-channel-agent`, `doctor-switch`, `update-channel-switch`, `upgrade-survivor`, `published-upgrade-survivor`, `plugins-offline`, `plugin-update` +- `package` — `npm-onboard-channel-agent`, `doctor-switch`, `update-channel-switch`, `skill-install`, `update-corrupt-plugin`, `upgrade-survivor`, `published-upgrade-survivor`, `update-restart-auth`, `plugins-offline`, `plugin-update` - `product` — `package` plus `mcp-channels`, `cron-mcp-cleanup`, `openai-web-search-minimal`, `openwebui` - `full` — full Docker release-path chunks with OpenWebUI - `custom` — exact `docker_lanes`; required when `suite_profile=custom` @@ -269,7 +269,7 @@ For the dedicated update and plugin testing policy, including local commands, Docker lanes, Package Acceptance inputs, release defaults, and failure triage, see [Testing updates and plugins](/help/testing-updates-plugins). -Release checks call Package Acceptance with `source=artifact`, the prepared release package artifact, `suite_profile=custom`, `docker_lanes='doctor-switch update-channel-switch upgrade-survivor published-upgrade-survivor plugins-offline plugin-update'`, and `telegram_mode=mock-openai`. This keeps package migration, update, stale-plugin-dependency cleanup, configured-plugin install repair, offline plugin, plugin-update, and Telegram proof on the same resolved package tarball. Set `package_acceptance_package_spec` on Full Release Validation or OpenClaw Release Checks to run that same matrix against a shipped npm package instead of the SHA-built artifact. Cross-OS release checks still cover OS-specific onboarding, installer, and platform behavior; package/update product validation should start with Package Acceptance. The `published-upgrade-survivor` Docker lane validates one published package baseline per run in the blocking release path. 
In Package Acceptance, the resolved `package-under-test` tarball is always the candidate and `published_upgrade_survivor_baseline` selects the fallback published baseline, defaulting to `openclaw@latest`; failed-lane rerun commands preserve that baseline. Full Release Validation with `run_release_soak=true` or `release_profile=full` sets `published_upgrade_survivor_baselines='last-stable-4 2026.4.23 2026.5.2 2026.4.15'` and `published_upgrade_survivor_scenarios=reported-issues` to expand across the four latest stable npm releases plus pinned plugin-compatibility boundary releases and issue-shaped fixtures for Feishu config, preserved bootstrap/persona files, configured OpenClaw plugin installs, tilde log paths, and stale legacy plugin dependency roots. Multi-baseline published-upgrade survivor selections are sharded by baseline into separate targeted Docker runner jobs. The separate `Update Migration` workflow uses the `update-migration` Docker lane with `all-since-2026.4.23` and `plugin-deps-cleanup` when the question is exhaustive published update cleanup, not normal Full Release CI breadth. Local aggregate runs can pass exact package specs with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPECS`, keep a single lane with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPEC` such as `openclaw@2026.4.15`, or set `OPENCLAW_UPGRADE_SURVIVOR_SCENARIOS` for the scenario matrix. The published lane configures the baseline with a baked `openclaw config set` command recipe, records recipe steps in `summary.json`, and probes `/healthz`, `/readyz`, plus RPC status after Gateway start. The Windows packaged and installer fresh lanes also verify that an installed package can import a browser-control override from a raw absolute Windows path. The OpenAI cross-OS agent-turn smoke defaults to `OPENCLAW_CROSS_OS_OPENAI_MODEL` when set, otherwise `openai/gpt-5.4`, so the install and gateway proof stays on a GPT-5 test model while avoiding GPT-4.x defaults. 
+Release checks call Package Acceptance with `source=artifact`, the prepared release package artifact, `suite_profile=custom`, `docker_lanes='doctor-switch update-channel-switch skill-install update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins-offline plugin-update'`, and `telegram_mode=mock-openai`. This keeps package migration, update, live ClawHub skill install, stale-plugin-dependency cleanup, configured-plugin install repair, offline plugin, plugin-update, and Telegram proof on the same resolved package tarball. Set `package_acceptance_package_spec` on Full Release Validation or OpenClaw Release Checks to run that same matrix against a shipped npm package instead of the SHA-built artifact. Cross-OS release checks still cover OS-specific onboarding, installer, and platform behavior; package/update product validation should start with Package Acceptance. The `published-upgrade-survivor` Docker lane validates one published package baseline per run in the blocking release path. In Package Acceptance, the resolved `package-under-test` tarball is always the candidate and `published_upgrade_survivor_baseline` selects the fallback published baseline, defaulting to `openclaw@latest`; failed-lane rerun commands preserve that baseline. Full Release Validation with `run_release_soak=true` or `release_profile=full` sets `published_upgrade_survivor_baselines='last-stable-4 2026.4.23 2026.5.2 2026.4.15'` and `published_upgrade_survivor_scenarios=reported-issues` to expand across the four latest stable npm releases plus pinned plugin-compatibility boundary releases and issue-shaped fixtures for Feishu config, preserved bootstrap/persona files, configured OpenClaw plugin installs, tilde log paths, and stale legacy plugin dependency roots. Multi-baseline published-upgrade survivor selections are sharded by baseline into separate targeted Docker runner jobs. 
The separate `Update Migration` workflow uses the `update-migration` Docker lane with `all-since-2026.4.23` and `plugin-deps-cleanup` when the question is exhaustive published update cleanup, not normal Full Release CI breadth. Local aggregate runs can pass exact package specs with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPECS`, keep a single lane with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPEC` such as `openclaw@2026.4.15`, or set `OPENCLAW_UPGRADE_SURVIVOR_SCENARIOS` for the scenario matrix. The published lane configures the baseline with a baked `openclaw config set` command recipe, records recipe steps in `summary.json`, and probes `/healthz`, `/readyz`, plus RPC status after Gateway start. The Windows packaged and installer fresh lanes also verify that an installed package can import a browser-control override from a raw absolute Windows path. The OpenAI cross-OS agent-turn smoke defaults to `OPENCLAW_CROSS_OS_OPENAI_MODEL` when set, otherwise `openai/gpt-5.4`, so the install and gateway proof stays on a GPT-5 test model while avoiding GPT-4.x defaults. ### Legacy compatibility windows diff --git a/docs/gateway/configuration-examples.md b/docs/gateway/configuration-examples.md index 00aa40c38bb..0951d887204 100644 --- a/docs/gateway/configuration-examples.md +++ b/docs/gateway/configuration-examples.md @@ -452,6 +452,7 @@ Save to `~/.openclaw/openclaw.json` and you can DM the bot from that number. 
install: { preferBrew: true, nodeManager: "npm", // npm | pnpm | yarn | bun + allowUploadedArchives: false, }, entries: { "image-lab": { diff --git a/docs/gateway/configuration-reference.md b/docs/gateway/configuration-reference.md index a3a2acd2ec3..e55592ba5ba 100644 --- a/docs/gateway/configuration-reference.md +++ b/docs/gateway/configuration-reference.md @@ -138,6 +138,7 @@ See [MCP](/cli/mcp#openclaw-as-an-mcp-client-registry) and install: { preferBrew: true, nodeManager: "npm", // npm | pnpm | yarn | bun + allowUploadedArchives: false, }, entries: { "image-lab": { @@ -159,6 +160,10 @@ See [MCP](/cli/mcp#openclaw-as-an-mcp-client-registry) and available before falling back to other installer kinds. - `install.nodeManager`: node installer preference for `metadata.openclaw.install` specs (`npm` | `pnpm` | `yarn` | `bun`). +- `install.allowUploadedArchives`: allow trusted `operator.admin` Gateway + clients to install private zip archives staged through `skills.upload.*` + (default: false). This only enables the uploaded-archive path; normal ClawHub + installs do not require it. - `entries..enabled: false` disables a skill even if bundled/installed. - `entries..apiKey`: convenience for skills declaring a primary env var (plaintext string or SecretRef object). diff --git a/docs/gateway/protocol.md b/docs/gateway/protocol.md index b204672c303..e73d2488913 100644 --- a/docs/gateway/protocol.md +++ b/docs/gateway/protocol.md @@ -570,9 +570,28 @@ terminal summary, and sanitized error text. sanitized install options without exposing raw secret values. - Operators may call `skills.search` and `skills.detail` (`operator.read`) for ClawHub discovery metadata. -- Operators may call `skills.install` (`operator.admin`) in two modes: +- Operators may call `skills.upload.begin`, `skills.upload.chunk`, and + `skills.upload.commit` (`operator.admin`) to stage a private skill archive + before installing it. 
This is a separate admin upload path for trusted clients, + not the normal ClawHub skill install flow, and is disabled by default unless + `skills.install.allowUploadedArchives` is enabled. + - `skills.upload.begin({ kind: "skill-archive", slug, sizeBytes, sha256?, force?, idempotencyKey? })` + creates an upload bound to that slug and force value. + - `skills.upload.chunk({ uploadId, offset, dataBase64 })` appends bytes at + the exact decoded offset. + - `skills.upload.commit({ uploadId, sha256? })` verifies the final size and + SHA-256. Commit only finalizes the upload; it does not install the skill. + - Uploaded skill archives are zip archives containing a `SKILL.md` root. The + archive's internal directory name never selects the install target. +- Operators may call `skills.install` (`operator.admin`) in three modes: - ClawHub mode: `{ source: "clawhub", slug, version?, force? }` installs a skill folder into the default agent workspace `skills/` directory. + - Upload mode: `{ source: "upload", uploadId, slug, force?, sha256?, timeoutMs? }` + installs a committed upload into the default agent workspace `skills/` + directory. The slug and force value must match the original + `skills.upload.begin` request. This mode is rejected unless + `skills.install.allowUploadedArchives` is enabled. The setting does not + affect ClawHub installs. - Gateway installer mode: `{ name, installId, dangerouslyForceUnsafeInstall?, timeoutMs? }` runs a declared `metadata.openclaw.install` action on the gateway host. - Operators may call `skills.update` (`operator.admin`) in two modes: diff --git a/docs/help/testing.md b/docs/help/testing.md index 1ef3e52d5b1..75c0a5cd8b6 100644 --- a/docs/help/testing.md +++ b/docs/help/testing.md @@ -652,7 +652,7 @@ These Docker runners split into two buckets: - `Package Acceptance` is the GitHub-native package gate for "does this installable tarball work as a product?" 
It resolves one candidate package from `source=npm`, `source=ref`, `source=url`, or `source=artifact`, uploads it as `package-under-test`, then runs the reusable Docker E2E lanes against that exact tarball instead of repacking the selected ref. Profiles are ordered by breadth: `smoke`, `package`, `product`, and `full`. See [Testing updates and plugins](/help/testing-updates-plugins) for the package/update/plugin contract, published-upgrade survivor matrix, release defaults, and failure triage. - Build and release checks run `scripts/check-cli-bootstrap-imports.mjs` after tsdown. The guard walks the static built graph from `dist/entry.js` and `dist/cli/run-main.js` and fails if pre-dispatch startup imports package dependencies such as Commander, prompt UI, undici, or logging before command dispatch; it also keeps the bundled gateway run chunk under budget and rejects static imports of known cold gateway paths. Packaged CLI smoke also covers root help, onboard help, doctor help, status, config schema, and a model-list command. - Package Acceptance legacy compatibility is capped at `2026.4.25` (`2026.4.25-beta.*` included). Through that cutoff, the harness tolerates only shipped-package metadata gaps: omitted private QA inventory entries, missing `gateway install --wrapper`, missing patch files in the tarball-derived git fixture, missing persisted `update.channel`, legacy plugin install-record locations, missing marketplace install-record persistence, and config metadata migration during `plugins update`. For packages after `2026.4.25`, those paths are strict failures. 
-- Container smoke runners: `test:docker:openwebui`, `test:docker:onboard`, `test:docker:npm-onboard-channel-agent`, `test:docker:update-channel-switch`, `test:docker:upgrade-survivor`, `test:docker:published-upgrade-survivor`, `test:docker:session-runtime-context`, `test:docker:agents-delete-shared-workspace`, `test:docker:gateway-network`, `test:docker:browser-cdp-snapshot`, `test:docker:mcp-channels`, `test:docker:pi-bundle-mcp-tools`, `test:docker:cron-mcp-cleanup`, `test:docker:plugins`, `test:docker:plugin-update`, `test:docker:plugin-lifecycle-matrix`, and `test:docker:config-reload` boot one or more real containers and verify higher-level integration paths. +- Container smoke runners: `test:docker:openwebui`, `test:docker:onboard`, `test:docker:npm-onboard-channel-agent`, `test:docker:skill-install`, `test:docker:update-channel-switch`, `test:docker:upgrade-survivor`, `test:docker:published-upgrade-survivor`, `test:docker:session-runtime-context`, `test:docker:agents-delete-shared-workspace`, `test:docker:gateway-network`, `test:docker:browser-cdp-snapshot`, `test:docker:mcp-channels`, `test:docker:pi-bundle-mcp-tools`, `test:docker:cron-mcp-cleanup`, `test:docker:plugins`, `test:docker:plugin-update`, `test:docker:plugin-lifecycle-matrix`, and `test:docker:config-reload` boot one or more real containers and verify higher-level integration paths. 
The live-model Docker runners also bind-mount only the needed CLI auth homes (or all supported ones when the run is not narrowed), then copy them into the container home before the run so external-CLI OAuth can refresh tokens without mutating the host auth store: @@ -665,6 +665,7 @@ The live-model Docker runners also bind-mount only the needed CLI auth homes (or - Open WebUI live smoke: `pnpm test:docker:openwebui` (script: `scripts/e2e/openwebui-docker.sh`) - Onboarding wizard (TTY, full scaffolding): `pnpm test:docker:onboard` (script: `scripts/e2e/onboard-docker.sh`) - Npm tarball onboarding/channel/agent smoke: `pnpm test:docker:npm-onboard-channel-agent` installs the packed OpenClaw tarball globally in Docker, configures OpenAI via env-ref onboarding plus Telegram by default, runs doctor, and runs one mocked OpenAI agent turn. Reuse a prebuilt tarball with `OPENCLAW_CURRENT_PACKAGE_TGZ=/path/to/openclaw-*.tgz`, skip the host rebuild with `OPENCLAW_NPM_ONBOARD_HOST_BUILD=0`, or switch channel with `OPENCLAW_NPM_ONBOARD_CHANNEL=discord` or `OPENCLAW_NPM_ONBOARD_CHANNEL=slack`. +- Skill install smoke: `pnpm test:docker:skill-install` installs the packed OpenClaw tarball globally in Docker, disables uploaded archive installs in config, resolves the current live ClawHub skill slug from search, installs it with `openclaw skills install`, and verifies the installed skill plus `.clawhub` origin/lock metadata. - Update channel switch smoke: `pnpm test:docker:update-channel-switch` installs the packed OpenClaw tarball globally in Docker, switches from package `stable` to git `dev`, verifies the persisted channel and plugin post-update work, then switches back to package `stable` and checks update status. - Upgrade survivor smoke: `pnpm test:docker:upgrade-survivor` installs the packed OpenClaw tarball over a dirty old-user fixture with agents, channel config, plugin allowlists, stale plugin dependency state, and existing workspace/session files. 
It runs package update plus non-interactive doctor without live provider or channel keys, then starts a loopback Gateway and checks config/state preservation plus startup/status budgets. - Published upgrade survivor smoke: `pnpm test:docker:published-upgrade-survivor` installs `openclaw@latest` by default, seeds realistic existing-user files, configures that baseline with a baked command recipe, validates the resulting config, updates that published install to the candidate tarball, runs non-interactive doctor, writes `.artifacts/upgrade-survivor/summary.json`, then starts a loopback Gateway and checks configured intents, state preservation, startup, `/healthz`, `/readyz`, and RPC status budgets. Override one baseline with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPEC`, ask the aggregate scheduler to expand exact local baselines with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPECS` such as `openclaw@2026.5.2 openclaw@2026.4.23 openclaw@2026.4.15`, and expand issue-shaped fixtures with `OPENCLAW_UPGRADE_SURVIVOR_SCENARIOS` such as `reported-issues`; the reported-issues set includes `configured-plugin-installs` for automatic external OpenClaw plugin install repair. Package Acceptance exposes those as `published_upgrade_survivor_baseline`, `published_upgrade_survivor_baselines`, and `published_upgrade_survivor_scenarios`, resolves meta baseline tokens such as `last-stable-4` or `all-since-2026.4.23`, and Full Release Validation expands the release-soak package gate to `last-stable-4 2026.4.23 2026.5.2 2026.4.15` plus `reported-issues`. 
diff --git a/docs/reference/RELEASING.md b/docs/reference/RELEASING.md index 99097926743..12a2edfef95 100644 --- a/docs/reference/RELEASING.md +++ b/docs/reference/RELEASING.md @@ -508,9 +508,9 @@ Supported candidate sources: `OpenClaw Release Checks` runs Package Acceptance with `source=artifact`, the prepared release package artifact, `suite_profile=custom`, -`docker_lanes=doctor-switch update-channel-switch upgrade-survivor published-upgrade-survivor update-restart-auth plugins-offline plugin-update`, +`docker_lanes=doctor-switch update-channel-switch skill-install update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins-offline plugin-update`, `telegram_mode=mock-openai`. Package Acceptance keeps migration, update, -configured-auth update restart, stale plugin dependency cleanup, offline plugin +configured-auth update restart, live ClawHub skill install, stale plugin dependency cleanup, offline plugin fixtures, plugin update, and Telegram package QA against the same resolved tarball. Blocking release checks use the default latest published package baseline; `run_release_soak=true` or @@ -559,8 +559,8 @@ Common package profiles: - `smoke`: quick package install/channel/agent, gateway network, and config reload lanes -- `package`: install/update/restart/plugin package contracts without live - ClawHub; this is the release-check default +- `package`: install/update/restart/plugin package contracts plus live ClawHub + skill install proof; this is the release-check default - `product`: `package` plus MCP channels, cron/subagent cleanup, OpenAI web search, and OpenWebUI - `full`: Docker release-path chunks with OpenWebUI diff --git a/docs/reference/test.md b/docs/reference/test.md index c498985679a..c1e916de54e 100644 --- a/docs/reference/test.md +++ b/docs/reference/test.md @@ -40,6 +40,7 @@ title: "Tests" - `pnpm test:live`: Runs provider live tests (minimax/zai). 
Requires API keys and `LIVE=1` (or provider-specific `*_LIVE_TEST=1`) to unskip. - `pnpm test:docker:all`: Builds the shared live-test image, packs OpenClaw once as an npm tarball, builds/reuses a bare Node/Git runner image plus a functional image that installs that tarball into `/app`, then runs Docker smoke lanes with `OPENCLAW_SKIP_DOCKER_BUILD=1` through a weighted scheduler. The bare image (`OPENCLAW_DOCKER_E2E_BARE_IMAGE`) is used for installer/update/plugin-dependency lanes; those lanes mount the prebuilt tarball instead of using copied repo sources. The functional image (`OPENCLAW_DOCKER_E2E_FUNCTIONAL_IMAGE`) is used for normal built-app functionality lanes. `scripts/package-openclaw-for-docker.mjs` is the single local/CI package packer and validates the tarball plus `dist/postinstall-inventory.json` before Docker consumes it. Docker lane definitions live in `scripts/lib/docker-e2e-scenarios.mjs`; planner logic lives in `scripts/lib/docker-e2e-plan.mjs`; `scripts/test-docker-all.mjs` executes the selected plan. `node scripts/test-docker-all.mjs --plan-json` emits the scheduler-owned CI plan for selected lanes, image kinds, package/live-image needs, state scenarios, and credential checks without building or running Docker. `OPENCLAW_DOCKER_ALL_PARALLELISM=` controls process slots and defaults to 10; `OPENCLAW_DOCKER_ALL_TAIL_PARALLELISM=` controls the provider-sensitive tail pool and defaults to 10. Heavy lane caps default to `OPENCLAW_DOCKER_ALL_LIVE_LIMIT=9`, `OPENCLAW_DOCKER_ALL_NPM_LIMIT=10`, and `OPENCLAW_DOCKER_ALL_SERVICE_LIMIT=7`; provider caps default to one heavy lane per provider via `OPENCLAW_DOCKER_ALL_LIVE_CLAUDE_LIMIT=4`, `OPENCLAW_DOCKER_ALL_LIVE_CODEX_LIMIT=4`, and `OPENCLAW_DOCKER_ALL_LIVE_GEMINI_LIMIT=4`. Use `OPENCLAW_DOCKER_ALL_WEIGHT_LIMIT` or `OPENCLAW_DOCKER_ALL_DOCKER_LIMIT` for larger hosts. 
If one lane exceeds the effective weight or resource cap on a low-parallelism host, it can still start from an empty pool and will run alone until it releases capacity. Lane starts are staggered by 2 seconds by default to avoid local Docker daemon create storms; override with `OPENCLAW_DOCKER_ALL_START_STAGGER_MS=`. The runner preflights Docker by default, cleans stale OpenClaw E2E containers, emits active-lane status every 30 seconds, shares provider CLI tool caches between compatible lanes, retries transient live-provider failures once by default (`OPENCLAW_DOCKER_ALL_LIVE_RETRIES=`), and stores lane timings in `.artifacts/docker-tests/lane-timings.json` for longest-first ordering on later runs. Use `OPENCLAW_DOCKER_ALL_DRY_RUN=1` to print the lane manifest without running Docker, `OPENCLAW_DOCKER_ALL_STATUS_INTERVAL_MS=` to tune status output, or `OPENCLAW_DOCKER_ALL_TIMINGS=0` to disable timing reuse. Use `OPENCLAW_DOCKER_ALL_LIVE_MODE=skip` for deterministic/local lanes only or `OPENCLAW_DOCKER_ALL_LIVE_MODE=only` for live-provider lanes only; package aliases are `pnpm test:docker:local:all` and `pnpm test:docker:live:all`. Live-only mode merges main and tail live lanes into one longest-first pool so provider buckets can pack Claude, Codex, and Gemini work together. The runner stops scheduling new pooled lanes after the first failure unless `OPENCLAW_DOCKER_ALL_FAIL_FAST=0` is set, and each lane has a 120-minute fallback timeout overrideable with `OPENCLAW_DOCKER_ALL_LANE_TIMEOUT_MS`; selected live/tail lanes use tighter per-lane caps. CLI backend Docker setup commands have their own timeout via `OPENCLAW_LIVE_CLI_BACKEND_SETUP_TIMEOUT_SECONDS` (default 180). Per-lane logs, `summary.json`, `failures.json`, and phase timings are written under `.artifacts/docker-tests//`; use `pnpm test:docker:timings ` to inspect slow lanes and `pnpm test:docker:rerun ` to print cheap targeted rerun commands. 
- `pnpm test:docker:browser-cdp-snapshot`: Builds a Chromium-backed source E2E container, starts raw CDP plus an isolated Gateway, runs `browser doctor --deep`, and verifies CDP role snapshots include link URLs, cursor-promoted clickables, iframe refs, and frame metadata. +- `pnpm test:docker:skill-install`: Installs the packed OpenClaw tarball in a bare Docker runner, disables `skills.install.allowUploadedArchives`, resolves a current skill slug from live ClawHub search, installs it through `openclaw skills install`, and verifies `SKILL.md`, `.clawhub/origin.json`, `.clawhub/lock.json`, and `skills info --json`. - CLI backend live Docker probes can be run as focused lanes, for example `pnpm test:docker:live-cli-backend:codex`, `pnpm test:docker:live-cli-backend:codex:resume`, or `pnpm test:docker:live-cli-backend:codex:mcp`. Claude and Gemini have matching `:resume` and `:mcp` aliases. - `pnpm test:docker:openwebui`: Starts Dockerized OpenClaw + Open WebUI, signs in through Open WebUI, checks `/api/models`, then runs a real proxied chat through `/api/chat/completions`. Requires a usable live model key (for example OpenAI in `~/.profile`), pulls an external Open WebUI image, and is not expected to be CI-stable like the normal unit/e2e suites. - `pnpm test:docker:mcp-channels`: Starts a seeded Gateway container and a second client container that spawns `openclaw mcp serve`, then verifies routed conversation discovery, transcript reads, attachment metadata, live event queue behavior, outbound send routing, and Claude-style channel + permission notifications over the real stdio bridge. The Claude notification assertion reads the raw stdio MCP frames directly so the smoke reflects what the bridge actually emits. 
diff --git a/docs/tools/skills-config.md b/docs/tools/skills-config.md index b94e9b6dbac..52bd384b316 100644 --- a/docs/tools/skills-config.md +++ b/docs/tools/skills-config.md @@ -23,6 +23,7 @@ Most skills loader/install configuration lives under `skills` in install: { preferBrew: true, nodeManager: "npm", // npm | pnpm | yarn | bun (Gateway runtime still Node; bun not recommended) + allowUploadedArchives: false, }, entries: { "image-lab": { @@ -101,6 +102,10 @@ Rules: - `openclaw setup --node-manager` is narrower and currently accepts `npm`, `pnpm`, or `bun`. Set `skills.install.nodeManager: "yarn"` manually if you want Yarn-backed skill installs. +- `install.allowUploadedArchives`: allow trusted `operator.admin` Gateway + clients to install private zip archives staged through `skills.upload.*` + (default: false). This only enables the uploaded-archive path; normal ClawHub + installs do not require it. - `entries.<skill>`: per-skill overrides. - `agents.defaults.skills`: optional default skill allowlist inherited by agents that omit `agents.list[].skills`. diff --git a/docs/tools/skills.md b/docs/tools/skills.md index 2f4d301d533..c9084a2dcf1 100644 --- a/docs/tools/skills.md +++ b/docs/tools/skills.md @@ -142,6 +142,17 @@ Configured skill roots also support one grouping level, such as `skills/<group>/<skill>/SKILL.md`, so related third-party skills can be kept under a shared folder without broad recursive scanning. +Gateway clients that need private, non-ClawHub delivery can stage a zip skill +archive with `skills.upload.begin`, `skills.upload.chunk`, and +`skills.upload.commit`, then install the committed upload with +`skills.install({ source: "upload", uploadId, slug, force?, sha256? })`. This is +an explicit admin upload path for trusted clients, not the normal +`openclaw skills install <slug>` or ClawHub install flow. It is off by default +and only works when `skills.install.allowUploadedArchives: true` is set in +`openclaw.json`. 
Upload mode still installs into the default agent workspace +`skills/` directory; the archive's internal folder name is ignored for the +final install target. + +ClawHub skill pages expose the latest security scan state before install, with scanner detail pages for VirusTotal, ClawScan, and static analysis. `openclaw skills install <slug>` remains the only install path; publishers @@ -157,6 +168,12 @@ Prefer sandboxed runs for untrusted inputs and risky tools. See - Workspace and extra-dir skill discovery only accepts skill roots and `SKILL.md` files whose resolved realpath stays inside the configured root. +- Gateway private archive installs are off by default. When explicitly enabled, + they require a committed zip upload containing `SKILL.md` and reuse the same + archive extraction, path traversal, symlink, force, and rollback protections as + ClawHub skill installs. They are gated by + `skills.install.allowUploadedArchives`; normal ClawHub installs do not require + that setting. - Gateway-backed skill dependency installs (`skills.install`, onboarding, and the Skills settings UI) run the built-in dangerous-code scanner before executing installer metadata. `critical` findings block by default unless the caller explicitly sets the dangerous override; suspicious findings still warn only. - `openclaw skills install <slug>` is different - it downloads a ClawHub skill folder into the workspace and does not use the installer-metadata path above. - `skills.entries.*.env` and `skills.entries.*.apiKey` inject secrets into the **host** process for that agent turn (not the sandbox). Keep secrets out of prompts and logs. 
diff --git a/package.json b/package.json index fcad827b19f..85e898db278 100644 --- a/package.json +++ b/package.json @@ -83,6 +83,7 @@ "scripts/lib/official-external-plugin-catalog.json", "scripts/lib/official-external-provider-catalog.json", "scripts/lib/package-dist-imports.mjs", + "scripts/lib/bundled-runtime-deps-install.mjs", "scripts/postinstall-bundled-plugins.mjs", "scripts/windows-cmd-helpers.mjs" ], @@ -1586,6 +1587,7 @@ "test:docker:qr": "bash scripts/e2e/qr-import-docker.sh", "test:docker:rerun": "node scripts/docker-e2e-rerun.mjs", "test:docker:session-runtime-context": "bash scripts/e2e/session-runtime-context-docker.sh", + "test:docker:skill-install": "bash scripts/e2e/skill-install-docker.sh", "test:docker:timings": "node scripts/docker-e2e-timings.mjs", "test:docker:update-channel-switch": "bash scripts/e2e/update-channel-switch-docker.sh", "test:docker:update-corrupt-plugin": "bash scripts/e2e/update-corrupt-plugin-docker.sh", diff --git a/scripts/e2e/lib/skills/clawhub-install-proof.sh b/scripts/e2e/lib/skills/clawhub-install-proof.sh new file mode 100644 index 00000000000..3f57a188972 --- /dev/null +++ b/scripts/e2e/lib/skills/clawhub-install-proof.sh @@ -0,0 +1,148 @@ +#!/usr/bin/env bash +# Live ClawHub skill install proof for package-backed Docker/Testbox lanes. +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../.." 
&& pwd)" +cd "$ROOT_DIR" + +source "$ROOT_DIR/scripts/lib/openclaw-e2e-instance.sh" + +OPENCLAW_TEST_STATE_SCRIPT_B64="${OPENCLAW_TEST_STATE_SCRIPT_B64:-}" +if [ -n "$OPENCLAW_TEST_STATE_SCRIPT_B64" ]; then + openclaw_e2e_eval_test_state_from_b64 "$OPENCLAW_TEST_STATE_SCRIPT_B64" +else + export HOME="$(mktemp -d "${TMPDIR:-/tmp}/openclaw-skill-install-home.XXXXXX")" + export USERPROFILE="$HOME" + export OPENCLAW_HOME="$HOME" + export OPENCLAW_STATE_DIR="$HOME/.openclaw" + export OPENCLAW_CONFIG_PATH="$OPENCLAW_STATE_DIR/openclaw.json" + mkdir -p "$OPENCLAW_STATE_DIR" +fi + +if [ -n "${OPENCLAW_CURRENT_PACKAGE_TGZ:-}" ]; then + export NPM_CONFIG_PREFIX="${NPM_CONFIG_PREFIX:-$HOME/.npm-global}" + export PATH="$NPM_CONFIG_PREFIX/bin:$PATH" + openclaw_e2e_install_package /tmp/openclaw-skill-install-npm.log +fi + +if [ -n "${OPENCLAW_CURRENT_PACKAGE_TGZ:-}" ] && command -v openclaw >/dev/null 2>&1; then + OPENCLAW_CMD=(openclaw) +elif command -v pnpm >/dev/null 2>&1 && [ -f package.json ]; then + if [ "${OPENCLAW_SKILL_INSTALL_E2E_BUILD_SOURCE:-0}" = "1" ]; then + pnpm build >/tmp/openclaw-skill-install-build.log 2>&1 + fi + OPENCLAW_CMD=(pnpm --silent openclaw) +elif command -v openclaw >/dev/null 2>&1; then + OPENCLAW_CMD=(openclaw) +else + echo "openclaw command not found; install package first or run from repo with pnpm" >&2 + exit 1 +fi + +mkdir -p "$(dirname "$OPENCLAW_CONFIG_PATH")" +node --input-type=module - "$OPENCLAW_CONFIG_PATH" <<'NODE' +import fs from "node:fs"; +const configPath = process.argv[2]; +let config = {}; +try { + config = JSON.parse(fs.readFileSync(configPath, "utf8")); +} catch {} +config.skills ??= {}; +config.skills.install ??= {}; +config.skills.install.allowUploadedArchives = false; +fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`); +NODE + +query="${OPENCLAW_SKILL_INSTALL_E2E_QUERY:-homeassistant}" +requested_slug="${OPENCLAW_SKILL_INSTALL_E2E_SLUG:-}" 
+preferred_slug="${OPENCLAW_SKILL_INSTALL_E2E_PREFERRED_SLUG:-homeassistant-skill}" +search_json="/tmp/openclaw-skill-install-search.json" +resolve_json="/tmp/openclaw-skill-install-resolved.json" +install_log="/tmp/openclaw-skill-install.log" +info_json="/tmp/openclaw-skill-install-info.json" + +echo "Searching live ClawHub skills for: $query" +"${OPENCLAW_CMD[@]}" skills search "$query" --limit 8 --json >"$search_json" + +node --input-type=module - "$search_json" "$resolve_json" "$requested_slug" "$preferred_slug" <<'NODE' +import fs from "node:fs"; +const [searchPath, resolvePath, requestedSlug, preferredSlug] = process.argv.slice(2); +const payload = JSON.parse(fs.readFileSync(searchPath, "utf8")); +const results = Array.isArray(payload) ? payload : Array.isArray(payload.results) ? payload.results : []; +const slugs = results.map((entry) => String(entry.slug ?? "")).filter(Boolean); +let chosen; +if (requestedSlug) { + chosen = results.find((entry) => entry.slug === requestedSlug); + if (!chosen) { + throw new Error(`Requested skill slug ${requestedSlug} not found. Search returned: ${slugs.join(", ") || "(none)"}`); + } +} else { + chosen = + results.find((entry) => entry.slug === preferredSlug) ?? + results.find((entry) => String(entry.slug ?? "").includes("homeassistant")) ?? + results[0]; +} +if (!chosen?.slug) { + throw new Error(`No installable skill slug found. Search returned: ${slugs.join(", ") || "(none)"}`); +} +fs.writeFileSync(resolvePath, `${JSON.stringify({ + slug: chosen.slug, + version: chosen.version ?? null, + displayName: chosen.displayName ?? chosen.name ?? chosen.slug, +})}\n`); +NODE + +slug="$(node -e 'process.stdout.write(JSON.parse(require("node:fs").readFileSync(process.argv[1], "utf8")).slug)' "$resolve_json")" +echo "Installing live ClawHub skill: $slug" +if ! 
"${OPENCLAW_CMD[@]}" skills install "$slug" --force >"$install_log" 2>&1; then + echo "Skill install failed" >&2 + openclaw_e2e_dump_logs /tmp/openclaw-skill-install-npm.log "$search_json" "$resolve_json" "$install_log" + exit 1 +fi + +workspace_dir="$HOME/.openclaw/workspace" +skill_dir="$workspace_dir/skills/$slug" +origin_json="$skill_dir/.clawhub/origin.json" +lock_json="$workspace_dir/.clawhub/lock.json" + +openclaw_e2e_assert_file "$skill_dir/SKILL.md" +openclaw_e2e_assert_file "$origin_json" +openclaw_e2e_assert_file "$lock_json" + +"${OPENCLAW_CMD[@]}" skills info "$slug" --json >"$info_json" + +node --input-type=module - "$OPENCLAW_CONFIG_PATH" "$skill_dir" "$origin_json" "$lock_json" "$info_json" "$slug" <<'NODE' +import fs from "node:fs"; +import path from "node:path"; +const [configPath, skillDir, originPath, lockPath, infoPath, slug] = process.argv.slice(2); +const read = (file) => JSON.parse(fs.readFileSync(file, "utf8")); +const config = read(configPath); +if (config.skills?.install?.allowUploadedArchives !== false) { + throw new Error("skills.install.allowUploadedArchives must remain false during ClawHub install proof"); +} +const origin = read(originPath); +if (origin.slug !== slug || origin.registry !== "https://clawhub.ai" || !origin.installedVersion) { + throw new Error(`Unexpected origin metadata: ${JSON.stringify(origin)}`); +} +const lock = read(lockPath); +if (lock.skills?.[slug]?.version !== origin.installedVersion) { + throw new Error(`Lockfile missing ${slug}@${origin.installedVersion}`); +} +const info = read(infoPath); +const infoFilePath = info.filePath ?? info.skill?.filePath; +const infoBaseDir = info.baseDir ?? 
info.skill?.baseDir; +if ( + info.skillKey !== slug && + (!infoFilePath || !path.resolve(infoFilePath).startsWith(path.resolve(skillDir))) +) { + throw new Error(`skills info did not report installed skill ${slug}: ${JSON.stringify(info)}`); +} +if (infoBaseDir && path.resolve(infoBaseDir) !== path.resolve(skillDir)) { + throw new Error(`skills info reported unexpected baseDir: ${infoBaseDir}`); +} +const skillText = fs.readFileSync(path.join(skillDir, "SKILL.md"), "utf8"); +if (!/^name:\s*/m.test(skillText)) { + throw new Error("Installed SKILL.md is missing frontmatter name"); +} +process.stdout.write(`E2E_OK installed=${slug} version=${origin.installedVersion} uploadArchives=false\n`); +NODE diff --git a/scripts/e2e/skill-install-docker.sh b/scripts/e2e/skill-install-docker.sh new file mode 100644 index 00000000000..26d6e604a42 --- /dev/null +++ b/scripts/e2e/skill-install-docker.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +# Installs a prepared OpenClaw npm tarball in Docker and proves live ClawHub +# skill install works while uploaded archive installs stay disabled. +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)" +source "$ROOT_DIR/scripts/lib/docker-e2e-image.sh" +source "$ROOT_DIR/scripts/lib/docker-e2e-package.sh" + +IMAGE_NAME="$(docker_e2e_resolve_image "openclaw-skill-install-e2e" OPENCLAW_SKILL_INSTALL_E2E_IMAGE)" +PACKAGE_TGZ="$(docker_e2e_prepare_package_tgz skill-install "${OPENCLAW_CURRENT_PACKAGE_TGZ:-}")" +OPENCLAW_TEST_STATE_SCRIPT_B64="$(docker_e2e_test_state_shell_b64 skill-install empty)" + +docker_e2e_package_mount_args "$PACKAGE_TGZ" +docker_e2e_build_or_reuse "$IMAGE_NAME" skill-install "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR" "bare" + +echo "Running live ClawHub skill install Docker E2E..." 
+docker_e2e_harness_mount_args +run_logged_print \ + skill-install-run \ + docker run --rm \ + "${DOCKER_E2E_HARNESS_ARGS[@]}" \ + -e COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \ + -e "OPENCLAW_TEST_STATE_SCRIPT_B64=$OPENCLAW_TEST_STATE_SCRIPT_B64" \ + -e "OPENCLAW_SKILL_INSTALL_E2E_QUERY=${OPENCLAW_SKILL_INSTALL_E2E_QUERY:-homeassistant}" \ + -e "OPENCLAW_SKILL_INSTALL_E2E_SLUG=${OPENCLAW_SKILL_INSTALL_E2E_SLUG:-}" \ + -e "OPENCLAW_SKILL_INSTALL_E2E_PREFERRED_SLUG=${OPENCLAW_SKILL_INSTALL_E2E_PREFERRED_SLUG:-homeassistant-skill}" \ + "${DOCKER_E2E_PACKAGE_ARGS[@]}" \ + "$IMAGE_NAME" \ + bash scripts/e2e/lib/skills/clawhub-install-proof.sh diff --git a/scripts/lib/docker-e2e-scenarios.mjs b/scripts/lib/docker-e2e-scenarios.mjs index 3c1ad1a60db..851efc56987 100644 --- a/scripts/lib/docker-e2e-scenarios.mjs +++ b/scripts/lib/docker-e2e-scenarios.mjs @@ -258,6 +258,13 @@ export const mainLanes = [ weight: 3, }, ), + npmLane("skill-install", "OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:skill-install", { + retryPatterns: LIVE_RETRY_PATTERNS, + retries: 1, + stateScenario: "empty", + timeoutMs: 10 * 60 * 1000, + weight: 2, + }), npmLane("upgrade-survivor", upgradeSurvivorCommand, { stateScenario: "upgrade-survivor", timeoutMs: 20 * 60 * 1000, @@ -578,6 +585,13 @@ const releasePathPackageUpdateCoreLanes = [ weight: 3, }, ), + npmLane("skill-install", "OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:skill-install", { + retryPatterns: LIVE_RETRY_PATTERNS, + retries: 1, + stateScenario: "empty", + timeoutMs: 10 * 60 * 1000, + weight: 2, + }), npmLane("upgrade-survivor", upgradeSurvivorCommand, { stateScenario: "upgrade-survivor", timeoutMs: 20 * 60 * 1000, diff --git a/scripts/release-check.ts b/scripts/release-check.ts index 8bcf9d81752..f7f4f0a8f1b 100755 --- a/scripts/release-check.ts +++ b/scripts/release-check.ts @@ -65,6 +65,7 @@ const requiredPathGroups = [ "scripts/lib/official-external-channel-catalog.json", "scripts/lib/official-external-plugin-catalog.json", 
"scripts/lib/official-external-provider-catalog.json", + "scripts/lib/bundled-runtime-deps-install.mjs", "scripts/lib/package-dist-imports.mjs", "scripts/postinstall-bundled-plugins.mjs", "dist/plugin-sdk/compat.js", diff --git a/src/agents/pi-embedded-runner/extra-params.ts b/src/agents/pi-embedded-runner/extra-params.ts index 9d0691367eb..cb330470aa7 100644 --- a/src/agents/pi-embedded-runner/extra-params.ts +++ b/src/agents/pi-embedded-runner/extra-params.ts @@ -14,6 +14,7 @@ import type { ProviderRuntimeModel } from "../../plugins/provider-runtime-model. import { legacyModelKey, modelKey } from "../model-selection-normalize.js"; import { supportsGptParallelToolCallsPayload } from "../provider-api-families.js"; import { resolveProviderRequestPolicyConfig } from "../provider-request-config.js"; +import type { AgentRuntimeTransport } from "../runtime-plan/types.js"; import { createGoogleThinkingPayloadWrapper } from "./google-stream-wrappers.js"; import { log } from "./logger.js"; import { createMinimaxThinkingDisabledWrapper } from "./minimax-stream-wrappers.js"; @@ -131,7 +132,7 @@ type CacheRetentionStreamOptions = Partial & { cacheRetention?: "none" | "short" | "long"; cachedContent?: string; }; -export type SupportedTransport = "sse" | "websocket" | "auto"; +export type SupportedTransport = AgentRuntimeTransport; function resolveSupportedTransport(value: unknown): SupportedTransport | undefined { return value === "sse" || value === "websocket" || value === "auto" ? 
value : undefined; diff --git a/src/agents/skills-archive-install.test.ts b/src/agents/skills-archive-install.test.ts new file mode 100644 index 00000000000..86942bbc320 --- /dev/null +++ b/src/agents/skills-archive-install.test.ts @@ -0,0 +1,126 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import JSZip from "jszip"; +import { afterEach, describe, expect, it } from "vitest"; +import { withExtractedArchiveRoot } from "../infra/install-flow.js"; +import { createTrackedTempDirs } from "../test-utils/tracked-temp-dirs.js"; +import { + CLAWHUB_SKILL_ARCHIVE_ROOT_MARKERS, + installExtractedSkillRoot, +} from "./skills-archive-install.js"; + +const tempDirs = createTrackedTempDirs(); + +async function writeZipArchive(params: { + archivePath: string; + entries: Record; +}): Promise { + const zip = new JSZip(); + for (const [entryPath, content] of Object.entries(params.entries)) { + zip.file(entryPath, content); + } + await fs.writeFile( + params.archivePath, + Buffer.from(await zip.generateAsync({ type: "nodebuffer" })), + ); +} + +async function isCaseSensitiveFileSystem(root: string): Promise { + const marker = path.join(root, "case-check"); + await fs.writeFile(marker, "case", "utf8"); + const upperExists = await fs + .stat(path.join(root, "CASE-CHECK")) + .then(() => true) + .catch(() => false); + return !upperExists; +} + +async function expectFlatRootMarkerRejected(params: { + marker: string; + root: string; +}): Promise { + const archivePath = path.join(params.root, `flat-${params.marker}.zip`); + await writeZipArchive({ + archivePath, + entries: { + [params.marker]: skillFileContent("Flat Legacy Marker"), + }, + }); + + const result = await withExtractedArchiveRoot({ + archivePath, + tempDirPrefix: "openclaw-skill-clawhub-test-", + timeoutMs: 120_000, + rootMarkers: ["SKILL.md"], + onExtracted: async () => ({ ok: true as const }), + }); + + expect(result).toMatchObject({ + ok: false, + error: expect.stringContaining("unexpected archive 
layout"), + }); +} + +function skillFileContent(name: string): string { + return ["---", `name: ${name}`, "description: Test skill", "---", "", "# Test", ""].join("\n"); +} + +afterEach(async () => { + await tempDirs.cleanup(); +}); + +describe("skill archive install", () => { + it.each(["skill.md", "skills.md", "SKILL.MD"])( + "installs a single-root ClawHub archive with legacy marker %s", + async (marker) => { + const root = await tempDirs.make("openclaw-skill-archive-install-"); + const archivePath = path.join(root, "legacy.zip"); + const workspaceDir = path.join(root, "workspace"); + await writeZipArchive({ + archivePath, + entries: { + [`mydir/${marker}`]: skillFileContent("Legacy Marker"), + }, + }); + + const result = await withExtractedArchiveRoot({ + archivePath, + tempDirPrefix: "openclaw-skill-clawhub-test-", + timeoutMs: 120_000, + rootMarkers: CLAWHUB_SKILL_ARCHIVE_ROOT_MARKERS, + onExtracted: async (extractedRoot) => + await installExtractedSkillRoot({ + workspaceDir, + slug: `legacy-${marker.toLowerCase().replace(/[^a-z0-9]+/g, "-")}`, + extractedRoot, + mode: "install", + scan: false, + rootMarkers: CLAWHUB_SKILL_ARCHIVE_ROOT_MARKERS, + }), + }); + + expect(result.ok).toBe(true); + if (!result.ok) { + return; + } + await expect(fs.readFile(path.join(result.targetDir, marker), "utf8")).resolves.toContain( + "Legacy Marker", + ); + }, + ); + + it("keeps flat-root non-SKILL.md legacy markers rejected by strict packed-root resolution", async () => { + const root = await tempDirs.make("openclaw-skill-archive-install-"); + await expectFlatRootMarkerRejected({ marker: "skills.md", root }); + }); + + it("keeps flat-root lowercase skill.md rejected by strict packed-root resolution on case-sensitive filesystems", async () => { + const root = await tempDirs.make("openclaw-skill-archive-install-"); + const caseSensitive = await isCaseSensitiveFileSystem(root); + if (!caseSensitive) { + expect(caseSensitive).toBe(false); + return; + } + await 
expectFlatRootMarkerRejected({ marker: "skill.md", root }); + }); +}); diff --git a/src/agents/skills-archive-install.ts b/src/agents/skills-archive-install.ts new file mode 100644 index 00000000000..96d0947f1b9 --- /dev/null +++ b/src/agents/skills-archive-install.ts @@ -0,0 +1,231 @@ +import path from "node:path"; +import type { ArchiveLogger } from "../infra/archive.js"; +import { formatErrorMessage } from "../infra/errors.js"; +import { pathExists } from "../infra/fs-safe.js"; +import { withExtractedArchiveRoot } from "../infra/install-flow.js"; +import { installPackageDir } from "../infra/install-package-dir.js"; +import { resolveSafeInstallDir } from "../infra/install-safe-path.js"; +import { + scanSkillInstallSource, + type InstallSecurityScanResult, +} from "../plugins/install-security-scan.js"; + +const VALID_SLUG_PATTERN = /^[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i; +const DEFAULT_SKILL_ARCHIVE_ROOT_MARKERS = ["SKILL.md"] as const; +export const CLAWHUB_SKILL_ARCHIVE_ROOT_MARKERS = [ + "SKILL.md", + "skill.md", + "skills.md", + "SKILL.MD", +] as const; + +function hasNonAscii(value: string): boolean { + for (const char of value) { + if (char.charCodeAt(0) > 0x7f) { + return true; + } + } + return false; +} + +type SkillArchiveInstallScan = + | false + | { + dangerouslyForceUnsafeInstall?: boolean; + installId?: string; + origin: string; + }; + +export type SkillArchiveInstallResult = + | { ok: true; targetDir: string } + | { ok: false; error: string; failureKind: SkillArchiveInstallFailureKind }; + +export type SkillArchiveInstallFailureKind = "invalid-request" | "unavailable"; + +export function normalizeTrackedSkillSlug(raw: string): string { + const slug = raw.trim(); + if (!slug || slug.includes("/") || slug.includes("\\") || slug.includes("..")) { + throw new Error(`Invalid skill slug: ${raw}`); + } + return slug; +} + +export function validateRequestedSkillSlug(raw: string): string { + const slug = normalizeTrackedSkillSlug(raw); + if (hasNonAscii(slug) 
|| !VALID_SLUG_PATTERN.test(slug)) { + throw new Error(`Invalid skill slug: ${raw}`); + } + return slug; +} + +export function resolveWorkspaceSkillInstallDir(workspaceDir: string, slug: string): string { + const skillsDir = path.join(path.resolve(workspaceDir), "skills"); + const target = resolveSafeInstallDir({ + baseDir: skillsDir, + id: slug, + invalidNameMessage: "invalid skill target path", + }); + if (!target.ok) { + throw new Error(target.error); + } + return target.path; +} + +function installFailure( + error: string, + failureKind: SkillArchiveInstallFailureKind, +): SkillArchiveInstallResult { + return { ok: false, error, failureKind }; +} + +async function hasSkillArchiveRoot( + rootDir: string, + rootMarkers: readonly string[], +): Promise { + for (const candidate of rootMarkers) { + if (await pathExists(path.join(rootDir, candidate))) { + return true; + } + } + return false; +} + +function scanBlockedFailureKind( + blocked: NonNullable, +): SkillArchiveInstallFailureKind { + return blocked.code === "security_scan_failed" ? 
"unavailable" : "invalid-request"; +} + +const TRANSIENT_ARCHIVE_ERROR_PATTERNS = [ + "enoent", + "enospc", + "eio", + "eacces", + "eperm", + "ebusy", + "emfile", + "enfile", + "timeout", + "timed out", +] as const; + +function archiveFailureKind(error: string): SkillArchiveInstallFailureKind { + const lower = error.toLowerCase(); + if (lower.startsWith("failed to install skill:")) { + return "unavailable"; + } + for (const pattern of TRANSIENT_ARCHIVE_ERROR_PATTERNS) { + if (lower.includes(pattern)) { + return "unavailable"; + } + } + return "invalid-request"; +} + +export async function installExtractedSkillRoot(params: { + workspaceDir: string; + slug: string; + extractedRoot: string; + mode: "install" | "update"; + timeoutMs?: number; + logger?: ArchiveLogger; + scan?: SkillArchiveInstallScan; + rootMarkers?: readonly string[]; +}): Promise { + try { + if ( + !(await hasSkillArchiveRoot( + params.extractedRoot, + params.rootMarkers ?? DEFAULT_SKILL_ARCHIVE_ROOT_MARKERS, + )) + ) { + return installFailure("archive is missing SKILL.md", "invalid-request"); + } + let targetDir: string; + try { + targetDir = resolveWorkspaceSkillInstallDir(params.workspaceDir, params.slug); + } catch (err) { + return installFailure(formatErrorMessage(err), "invalid-request"); + } + if (params.mode === "install" && (await pathExists(targetDir))) { + return installFailure( + `Skill already exists at ${targetDir}. Re-run with force/update.`, + "invalid-request", + ); + } + + if (params.scan) { + const scanResult = await scanSkillInstallSource({ + dangerouslyForceUnsafeInstall: params.scan.dangerouslyForceUnsafeInstall, + installId: params.scan.installId ?? "archive", + logger: params.logger ?? 
{}, + origin: params.scan.origin, + skillName: params.slug, + sourceDir: params.extractedRoot, + }); + if (scanResult?.blocked) { + return installFailure( + scanResult.blocked.reason, + scanBlockedFailureKind(scanResult.blocked), + ); + } + } + + const install = await installPackageDir({ + sourceDir: params.extractedRoot, + targetDir, + mode: params.mode, + timeoutMs: params.timeoutMs ?? 120_000, + logger: params.logger, + copyErrorPrefix: "failed to install skill", + hasDeps: false, + depsLogMessage: "", + }); + if (!install.ok) { + return installFailure(install.error, "unavailable"); + } + return { ok: true, targetDir }; + } catch (err) { + return installFailure(formatErrorMessage(err), "unavailable"); + } +} + +export async function installSkillArchiveFromPath(params: { + archivePath: string; + workspaceDir: string; + slug: string; + force?: boolean; + timeoutMs?: number; + logger?: ArchiveLogger; + scan?: SkillArchiveInstallScan; +}): Promise { + const result = await withExtractedArchiveRoot({ + archivePath: params.archivePath, + tempDirPrefix: "openclaw-skill-archive-", + timeoutMs: params.timeoutMs ?? 120_000, + logger: params.logger, + rootMarkers: ["SKILL.md"], + onExtracted: async (rootDir) => + await installExtractedSkillRoot({ + workspaceDir: params.workspaceDir, + slug: params.slug, + extractedRoot: rootDir, + mode: params.force ? "update" : "install", + timeoutMs: params.timeoutMs, + logger: params.logger, + scan: params.scan, + }), + }); + if (!result.ok) { + const error = result.error.includes("unexpected archive layout") + ? "archive is missing SKILL.md" + : result.error; + const failureKind = + "failureKind" in result && + (result.failureKind === "invalid-request" || result.failureKind === "unavailable") + ? 
result.failureKind + : archiveFailureKind(error); + return installFailure(error, failureKind); + } + return result; +} diff --git a/src/agents/skills-clawhub.test.ts b/src/agents/skills-clawhub.test.ts index 74836ea04de..d74217a42c1 100644 --- a/src/agents/skills-clawhub.test.ts +++ b/src/agents/skills-clawhub.test.ts @@ -70,7 +70,7 @@ describe("skills-clawhub", () => { archiveCleanupMock.mockResolvedValue(undefined); searchClawHubSkillsMock.mockResolvedValue([]); withExtractedArchiveRootMock.mockImplementation(async (params) => { - expect(params.rootMarkers).toEqual(["SKILL.md"]); + expect(params.rootMarkers).toEqual(["SKILL.md", "skill.md", "skills.md", "SKILL.MD"]); return await params.onExtracted("/tmp/extracted-skill"); }); installPackageDirMock.mockResolvedValue({ @@ -104,6 +104,25 @@ describe("skills-clawhub", () => { expect(archiveCleanupMock).toHaveBeenCalledTimes(1); }); + it.each(["skill.md", "skills.md", "SKILL.MD"])( + "installs ClawHub archives whose packed root uses legacy marker %s", + async (marker) => { + pathExistsMock.mockImplementation(async (input: string) => input.endsWith(marker)); + + const result = await installSkillFromClawHub({ + workspaceDir: "/tmp/workspace", + slug: "agentreceipt", + }); + + expect(result).toMatchObject({ ok: true }); + expect(installPackageDirMock).toHaveBeenCalledWith( + expect.objectContaining({ + sourceDir: "/tmp/extracted-skill", + }), + ); + }, + ); + describe("legacy tracked slugs remain updatable", () => { async function createLegacyTrackedSkillFixture(slug: string) { const workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-skills-clawhub-")); diff --git a/src/agents/skills-clawhub.ts b/src/agents/skills-clawhub.ts index 75dc80886b3..54feb8902f9 100644 --- a/src/agents/skills-clawhub.ts +++ b/src/agents/skills-clawhub.ts @@ -10,9 +10,14 @@ import { import { formatErrorMessage } from "../infra/errors.js"; import { pathExists } from "../infra/fs-safe.js"; import { withExtractedArchiveRoot } from 
"../infra/install-flow.js"; -import { installPackageDir } from "../infra/install-package-dir.js"; -import { resolveSafeInstallDir } from "../infra/install-safe-path.js"; import { tryReadJson, writeJson } from "../infra/json-files.js"; +import { + CLAWHUB_SKILL_ARCHIVE_ROOT_MARKERS, + installExtractedSkillRoot, + normalizeTrackedSkillSlug, + resolveWorkspaceSkillInstallDir, + validateRequestedSkillSlug, +} from "./skills-archive-install.js"; const DOT_DIR = ".clawhub"; const LEGACY_DOT_DIR = ".clawdhub"; @@ -62,38 +67,18 @@ type Logger = { info?: (message: string) => void; }; -const VALID_SLUG_PATTERN = /^[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i; -// eslint-disable-next-line no-control-regex -- detects any character outside printable ASCII -const NON_ASCII_PATTERN = /[^\x00-\x7F]/; - -function normalizeTrackedSlug(raw: string): string { - const slug = raw.trim(); - if (!slug || slug.includes("/") || slug.includes("\\") || slug.includes("..")) { - throw new Error(`Invalid skill slug: ${raw}`); - } - return slug; -} - -function validateRequestedSlug(raw: string): string { - const slug = normalizeTrackedSlug(raw); - if (NON_ASCII_PATTERN.test(slug) || !VALID_SLUG_PATTERN.test(slug)) { - throw new Error(`Invalid skill slug: ${raw}`); - } - return slug; -} - async function resolveRequestedUpdateSlug(params: { workspaceDir: string; requestedSlug: string; lock: ClawHubSkillsLockfile; }): Promise { - const trackedSlug = normalizeTrackedSlug(params.requestedSlug); - const trackedTargetDir = resolveSkillInstallDir(params.workspaceDir, trackedSlug); + const trackedSlug = normalizeTrackedSkillSlug(params.requestedSlug); + const trackedTargetDir = resolveWorkspaceSkillInstallDir(params.workspaceDir, trackedSlug); const trackedOrigin = await readClawHubSkillOrigin(trackedTargetDir); if (trackedOrigin || params.lock.skills[trackedSlug]) { return trackedSlug; } - return validateRequestedSlug(params.requestedSlug); + return validateRequestedSkillSlug(params.requestedSlug); } type 
ClawHubInstallParams = { @@ -118,29 +103,9 @@ type TrackedUpdateTarget = error: string; }; -function resolveSkillInstallDir(workspaceDir: string, slug: string): string { - const skillsDir = path.join(path.resolve(workspaceDir), "skills"); - const target = resolveSafeInstallDir({ - baseDir: skillsDir, - id: slug, - invalidNameMessage: "invalid skill target path", - }); - if (!target.ok) { - throw new Error(target.error); - } - return target.path; -} - -async function ensureSkillRoot(rootDir: string): Promise { - for (const candidate of ["SKILL.md", "skill.md", "skills.md", "SKILL.MD"]) { - if (await pathExists(path.join(rootDir, candidate))) { - return; - } - } - throw new Error("downloaded archive is missing SKILL.md"); -} - -async function readClawHubSkillsLockfile(workspaceDir: string): Promise { +export async function readClawHubSkillsLockfile( + workspaceDir: string, +): Promise { const candidates = [ path.join(workspaceDir, DOT_DIR, "lock.json"), path.join(workspaceDir, LEGACY_DOT_DIR, "lock.json"), @@ -235,31 +200,6 @@ async function resolveInstallVersion(params: { }; } -async function installExtractedSkill(params: { - workspaceDir: string; - slug: string; - extractedRoot: string; - mode: "install" | "update"; - logger?: Logger; -}): Promise<{ ok: true; targetDir: string } | { ok: false; error: string }> { - await ensureSkillRoot(params.extractedRoot); - const targetDir = resolveSkillInstallDir(params.workspaceDir, params.slug); - const install = await installPackageDir({ - sourceDir: params.extractedRoot, - targetDir, - mode: params.mode, - timeoutMs: 120_000, - logger: params.logger, - copyErrorPrefix: "failed to install skill", - hasDeps: false, - depsLogMessage: "", - }); - if (!install.ok) { - return install; - } - return { ok: true, targetDir }; -} - async function performClawHubSkillInstall( params: ClawHubInstallParams, ): Promise { @@ -269,7 +209,7 @@ async function performClawHubSkillInstall( version: params.version, baseUrl: params.baseUrl, }); - 
const targetDir = resolveSkillInstallDir(params.workspaceDir, params.slug); + const targetDir = resolveWorkspaceSkillInstallDir(params.workspaceDir, params.slug); if (!params.force && (await pathExists(targetDir))) { return { ok: false, @@ -288,18 +228,20 @@ async function performClawHubSkillInstall( archivePath: archive.archivePath, tempDirPrefix: "openclaw-skill-clawhub-", timeoutMs: 120_000, - rootMarkers: ["SKILL.md"], + rootMarkers: CLAWHUB_SKILL_ARCHIVE_ROOT_MARKERS, onExtracted: async (rootDir) => - await installExtractedSkill({ + await installExtractedSkillRoot({ workspaceDir: params.workspaceDir, slug: params.slug, extractedRoot: rootDir, mode: params.force ? "update" : "install", logger: params.logger, + scan: false, + rootMarkers: CLAWHUB_SKILL_ARCHIVE_ROOT_MARKERS, }), }); if (!install.ok) { - return install; + return { ok: false, error: install.error }; } const installedAt = Date.now(); @@ -341,7 +283,7 @@ async function installRequestedSkillFromClawHub( try { return await performClawHubSkillInstall({ ...params, - slug: validateRequestedSlug(params.slug), + slug: validateRequestedSkillSlug(params.slug), }); } catch (err) { return { @@ -357,7 +299,7 @@ async function installTrackedSkillFromClawHub( try { return await performClawHubSkillInstall({ ...params, - slug: normalizeTrackedSlug(params.slug), + slug: normalizeTrackedSkillSlug(params.slug), }); } catch (err) { return { @@ -373,7 +315,7 @@ async function resolveTrackedUpdateTarget(params: { lock: ClawHubSkillsLockfile; baseUrl?: string; }): Promise { - const targetDir = resolveSkillInstallDir(params.workspaceDir, params.slug); + const targetDir = resolveWorkspaceSkillInstallDir(params.workspaceDir, params.slug); const origin = (await readClawHubSkillOrigin(targetDir)) ?? 
null; if (!origin && !params.lock.skills[params.slug]) { return { @@ -416,7 +358,7 @@ export async function updateSkillsFromClawHub(params: { lock, }), ] - : Object.keys(lock.skills).map((slug) => normalizeTrackedSlug(slug)); + : Object.keys(lock.skills).map((slug) => normalizeTrackedSkillSlug(slug)); const results: UpdateClawHubSkillResult[] = []; for (const slug of slugs) { const tracked = await resolveTrackedUpdateTarget({ diff --git a/src/config/config.skills-entries-config.test.ts b/src/config/config.skills-entries-config.test.ts index 4ce0e390466..9eff1b6bcdd 100644 --- a/src/config/config.skills-entries-config.test.ts +++ b/src/config/config.skills-entries-config.test.ts @@ -83,6 +83,18 @@ describe("skills entries config schema", () => { expect(res.success).toBe(true); }); + it("accepts uploaded skill archive install policy", () => { + const res = OpenClawSchema.safeParse({ + skills: { + install: { + allowUploadedArchives: true, + }, + }, + }); + + expect(res.success).toBe(true); + }); + it("rejects legacy skills.policy config", () => { const res = OpenClawSchema.safeParse({ skills: { diff --git a/src/config/types.skills.ts b/src/config/types.skills.ts index 9c3dba9711c..d5fc3e9964c 100644 --- a/src/config/types.skills.ts +++ b/src/config/types.skills.ts @@ -27,6 +27,8 @@ export type SkillsLoadConfig = { export type SkillsInstallConfig = { preferBrew?: boolean; nodeManager?: "npm" | "pnpm" | "yarn" | "bun"; + /** Allow gateway clients to install zip archives staged through skills.upload.*. 
*/ + allowUploadedArchives?: boolean; }; export type SkillsLimitsConfig = { diff --git a/src/config/zod-schema.ts b/src/config/zod-schema.ts index 15719d306c0..29aa3803d37 100644 --- a/src/config/zod-schema.ts +++ b/src/config/zod-schema.ts @@ -1092,6 +1092,7 @@ export const OpenClawSchema = z nodeManager: z .union([z.literal("npm"), z.literal("pnpm"), z.literal("yarn"), z.literal("bun")]) .optional(), + allowUploadedArchives: z.boolean().optional(), }) .strict() .optional(), diff --git a/src/gateway/method-scopes.ts b/src/gateway/method-scopes.ts index 888371a9e74..b3184de0960 100644 --- a/src/gateway/method-scopes.ts +++ b/src/gateway/method-scopes.ts @@ -188,6 +188,9 @@ const METHOD_SCOPE_GROUPS: Record = { "agents.create", "agents.update", "agents.delete", + "skills.upload.begin", + "skills.upload.chunk", + "skills.upload.commit", "skills.install", "skills.update", "secrets.reload", diff --git a/src/gateway/protocol/index.ts b/src/gateway/protocol/index.ts index 1f8439db43f..f7d2d102ab8 100644 --- a/src/gateway/protocol/index.ts +++ b/src/gateway/protocol/index.ts @@ -360,6 +360,12 @@ import { SkillsSearchResultSchema, type SkillsStatusParams, SkillsStatusParamsSchema, + type SkillsUploadBeginParams, + SkillsUploadBeginParamsSchema, + type SkillsUploadChunkParams, + SkillsUploadChunkParamsSchema, + type SkillsUploadCommitParams, + SkillsUploadCommitParamsSchema, type SkillsUpdateParams, SkillsUpdateParamsSchema, type ToolsCatalogParams, @@ -646,6 +652,15 @@ export const validateToolsInvokeParams = ajv.compile(ToolsInv export const validateSkillsBinsParams = ajv.compile(SkillsBinsParamsSchema); export const validateSkillsInstallParams = ajv.compile(SkillsInstallParamsSchema); +export const validateSkillsUploadBeginParams = ajv.compile( + SkillsUploadBeginParamsSchema, +); +export const validateSkillsUploadChunkParams = ajv.compile( + SkillsUploadChunkParamsSchema, +); +export const validateSkillsUploadCommitParams = ajv.compile( + SkillsUploadCommitParamsSchema, 
+); export const validateSkillsUpdateParams = ajv.compile(SkillsUpdateParamsSchema); export const validateSkillsSearchParams = ajv.compile(SkillsSearchParamsSchema); export const validateSkillsDetailParams = ajv.compile(SkillsDetailParamsSchema); @@ -905,6 +920,9 @@ export { SkillsSearchResultSchema, SkillsDetailParamsSchema, SkillsDetailResultSchema, + SkillsUploadBeginParamsSchema, + SkillsUploadChunkParamsSchema, + SkillsUploadCommitParamsSchema, SkillsUpdateParamsSchema, CronJobSchema, CronListParamsSchema, @@ -1042,6 +1060,9 @@ export type { SkillsSearchResult, SkillsDetailParams, SkillsDetailResult, + SkillsUploadBeginParams, + SkillsUploadChunkParams, + SkillsUploadCommitParams, SkillsInstallParams, SkillsUpdateParams, EnvironmentStatus, diff --git a/src/gateway/protocol/schema/agents-models-skills.ts b/src/gateway/protocol/schema/agents-models-skills.ts index 854ca5fd619..f7496f2e9c3 100644 --- a/src/gateway/protocol/schema/agents-models-skills.ts +++ b/src/gateway/protocol/schema/agents-models-skills.ts @@ -227,6 +227,49 @@ export const SkillsBinsResultSchema = Type.Object( { additionalProperties: false }, ); +const Sha256String = Type.String({ + minLength: 64, + maxLength: 64, + pattern: "^[a-fA-F0-9]{64}$", +}); +const SkillUploadIdempotencyKeyString = Type.String({ + minLength: 1, + maxLength: 2048, +}); +const SkillUploadDataBase64String = Type.String({ + minLength: 1, + maxLength: 5_592_408, +}); + +export const SkillsUploadBeginParamsSchema = Type.Object( + { + kind: Type.Literal("skill-archive"), + slug: NonEmptyString, + sizeBytes: Type.Integer({ minimum: 1 }), + sha256: Type.Optional(Sha256String), + force: Type.Optional(Type.Boolean()), + idempotencyKey: Type.Optional(SkillUploadIdempotencyKeyString), + }, + { additionalProperties: false }, +); + +export const SkillsUploadChunkParamsSchema = Type.Object( + { + uploadId: NonEmptyString, + offset: Type.Integer({ minimum: 0 }), + dataBase64: SkillUploadDataBase64String, + }, + { 
additionalProperties: false }, +); + +export const SkillsUploadCommitParamsSchema = Type.Object( + { + uploadId: NonEmptyString, + sha256: Type.Optional(Sha256String), + }, + { additionalProperties: false }, +); + export const SkillsInstallParamsSchema = Type.Union([ Type.Object( { @@ -247,6 +290,17 @@ export const SkillsInstallParamsSchema = Type.Union([ }, { additionalProperties: false }, ), + Type.Object( + { + source: Type.Literal("upload"), + uploadId: NonEmptyString, + slug: NonEmptyString, + force: Type.Optional(Type.Boolean()), + sha256: Type.Optional(Sha256String), + timeoutMs: Type.Optional(Type.Integer({ minimum: 1000 })), + }, + { additionalProperties: false }, + ), ]); export const SkillsUpdateParamsSchema = Type.Union([ diff --git a/src/gateway/protocol/schema/protocol-schemas.ts b/src/gateway/protocol/schema/protocol-schemas.ts index 46bb737b9d4..43dbef5801c 100644 --- a/src/gateway/protocol/schema/protocol-schemas.ts +++ b/src/gateway/protocol/schema/protocol-schemas.ts @@ -38,6 +38,9 @@ import { SkillsSearchParamsSchema, SkillsSearchResultSchema, SkillsStatusParamsSchema, + SkillsUploadBeginParamsSchema, + SkillsUploadChunkParamsSchema, + SkillsUploadCommitParamsSchema, SkillsUpdateParamsSchema, ToolCatalogEntrySchema, ToolCatalogGroupSchema, @@ -439,6 +442,9 @@ export const ProtocolSchemas = { SkillsSearchResult: SkillsSearchResultSchema, SkillsDetailParams: SkillsDetailParamsSchema, SkillsDetailResult: SkillsDetailResultSchema, + SkillsUploadBeginParams: SkillsUploadBeginParamsSchema, + SkillsUploadChunkParams: SkillsUploadChunkParamsSchema, + SkillsUploadCommitParams: SkillsUploadCommitParamsSchema, SkillsInstallParams: SkillsInstallParamsSchema, SkillsUpdateParams: SkillsUpdateParamsSchema, CronJob: CronJobSchema, diff --git a/src/gateway/protocol/schema/types.ts b/src/gateway/protocol/schema/types.ts index 2811697b8c6..e512dd08442 100644 --- a/src/gateway/protocol/schema/types.ts +++ b/src/gateway/protocol/schema/types.ts @@ -180,6 +180,9 @@ 
export type SkillsSearchParams = SchemaType<"SkillsSearchParams">; export type SkillsSearchResult = SchemaType<"SkillsSearchResult">; export type SkillsDetailParams = SchemaType<"SkillsDetailParams">; export type SkillsDetailResult = SchemaType<"SkillsDetailResult">; +export type SkillsUploadBeginParams = SchemaType<"SkillsUploadBeginParams">; +export type SkillsUploadChunkParams = SchemaType<"SkillsUploadChunkParams">; +export type SkillsUploadCommitParams = SchemaType<"SkillsUploadCommitParams">; export type SkillsInstallParams = SchemaType<"SkillsInstallParams">; export type SkillsUpdateParams = SchemaType<"SkillsUpdateParams">; export type CronJob = SchemaType<"CronJob">; diff --git a/src/gateway/server-methods-list.ts b/src/gateway/server-methods-list.ts index 3c86126a272..b87a0afa799 100644 --- a/src/gateway/server-methods-list.ts +++ b/src/gateway/server-methods-list.ts @@ -96,6 +96,9 @@ const BASE_METHODS = [ "skills.search", "skills.detail", "skills.bins", + "skills.upload.begin", + "skills.upload.chunk", + "skills.upload.commit", "skills.install", "skills.update", "update.status", diff --git a/src/gateway/server-methods/skills-upload-store.test.ts b/src/gateway/server-methods/skills-upload-store.test.ts new file mode 100644 index 00000000000..af319e47a31 --- /dev/null +++ b/src/gateway/server-methods/skills-upload-store.test.ts @@ -0,0 +1,409 @@ +import { createHash, randomUUID } from "node:crypto"; +import fs from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { + createSkillUploadStore, + MAX_ACTIVE_SKILL_UPLOADS, + SkillUploadRequestError, +} from "./skills-upload-store.js"; + +let tempDirs: string[] = []; + +async function makeTempDir(): Promise { + const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-skill-upload-store-")); + tempDirs.push(dir); + return dir; +} + +function sha256(bytes: Buffer): string { + return 
createHash("sha256").update(bytes).digest("hex"); +} + +function deferred() { + let resolve!: () => void; + const promise = new Promise((innerResolve) => { + resolve = innerResolve; + }); + return { promise, resolve }; +} + +async function expectUploadError( + promise: Promise, + message: string | RegExp, +): Promise { + try { + await promise; + } catch (err) { + expect(err).toBeInstanceOf(SkillUploadRequestError); + const actual = err instanceof Error ? err.message : String(err); + if (typeof message === "string") { + expect(actual).toBe(message); + } else { + expect(actual).toMatch(message); + } + return; + } + throw new Error("expected upload request error"); +} + +describe("skill upload store", () => { + beforeEach(() => { + tempDirs = []; + }); + + afterEach(async () => { + await Promise.all( + tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true })), + ); + }); + + it("stores chunks and commits an archive with sha verification", async () => { + const rootDir = await makeTempDir(); + const store = createSkillUploadStore({ rootDir }); + const archive = Buffer.from("zip-bytes"); + const digest = sha256(archive); + const begin = await store.begin({ + kind: "skill-archive", + slug: "demo-skill", + sizeBytes: archive.length, + sha256: digest, + idempotencyKey: "same-upload", + }); + const repeated = await store.begin({ + kind: "skill-archive", + slug: "demo-skill", + sizeBytes: archive.length, + sha256: digest, + idempotencyKey: "same-upload", + }); + + expect(repeated.uploadId).toBe(begin.uploadId); + + await store.chunk({ + uploadId: begin.uploadId, + offset: 0, + dataBase64: archive.subarray(0, 3).toString("base64"), + }); + const chunk = await store.chunk({ + uploadId: begin.uploadId, + offset: 3, + dataBase64: archive.subarray(3).toString("base64"), + }); + expect(chunk.receivedBytes).toBe(archive.length); + + const commit = await store.commit({ uploadId: begin.uploadId, sha256: digest }); + expect(commit).toMatchObject({ + uploadId: 
begin.uploadId, + receivedBytes: archive.length, + sha256: digest, + }); + + const record = await store.withCommittedUpload(begin.uploadId, async (committedRecord) => { + return committedRecord; + }); + expect(record).toMatchObject({ + uploadId: begin.uploadId, + slug: "demo-skill", + force: false, + receivedBytes: archive.length, + actualSha256: digest, + committed: true, + }); + await expectUploadError( + store.chunk({ + uploadId: begin.uploadId, + offset: archive.length, + dataBase64: Buffer.from("x").toString("base64"), + }), + "upload is already committed", + ); + }); + + it("rejects traversal slugs and missing uploads", async () => { + const rootDir = await makeTempDir(); + const store = createSkillUploadStore({ rootDir }); + + await expectUploadError( + store.begin({ + kind: "skill-archive", + slug: "../escape", + sizeBytes: 1, + }), + "Invalid skill slug: ../escape", + ); + await expectUploadError( + store.withCommittedUpload(randomUUID(), async (record) => record), + /^upload not found: /, + ); + }); + + it("rejects offset, size, and sha mismatches", async () => { + const rootDir = await makeTempDir(); + const store = createSkillUploadStore({ rootDir }); + const archive = Buffer.from("abc"); + const begin = await store.begin({ + kind: "skill-archive", + slug: "demo-skill", + sizeBytes: archive.length, + }); + + await expectUploadError( + store.chunk({ + uploadId: begin.uploadId, + offset: 1, + dataBase64: archive.subarray(0, 1).toString("base64"), + }), + "upload offset mismatch: expected 0, got 1", + ); + await expectUploadError( + store.chunk({ + uploadId: begin.uploadId, + offset: 0, + dataBase64: Buffer.from("abcd").toString("base64"), + }), + "upload chunk exceeds declared size", + ); + await store.chunk({ + uploadId: begin.uploadId, + offset: 0, + dataBase64: archive.subarray(0, 2).toString("base64"), + }); + await expectUploadError( + store.commit({ uploadId: begin.uploadId }), + "upload size mismatch: expected 3, got 2", + ); + + const second = 
await store.begin({ + kind: "skill-archive", + slug: "second-skill", + sizeBytes: archive.length, + }); + await store.chunk({ + uploadId: second.uploadId, + offset: 0, + dataBase64: archive.toString("base64"), + }); + await expectUploadError( + store.commit({ uploadId: second.uploadId, sha256: "0".repeat(64) }), + "upload sha256 mismatch", + ); + }); + + it("truncates stale archive tails before retrying a chunk at the recorded offset", async () => { + const rootDir = await makeTempDir(); + const store = createSkillUploadStore({ rootDir }); + const archive = Buffer.from("abcdef"); + const begin = await store.begin({ + kind: "skill-archive", + slug: "retry-skill", + sizeBytes: archive.length, + }); + + await store.chunk({ + uploadId: begin.uploadId, + offset: 0, + dataBase64: archive.subarray(0, 3).toString("base64"), + }); + const archivePath = path.join(rootDir, begin.uploadId, "archive.zip"); + await fs.appendFile(archivePath, Buffer.from("stale-tail")); + await store.chunk({ + uploadId: begin.uploadId, + offset: 3, + dataBase64: archive.subarray(3).toString("base64"), + }); + + await expect(fs.readFile(archivePath)).resolves.toEqual(archive); + const commit = await store.commit({ uploadId: begin.uploadId, sha256: sha256(archive) }); + expect(commit.sha256).toBe(sha256(archive)); + }); + + it("rejects idempotent commit when committed metadata is missing the actual sha", async () => { + const rootDir = await makeTempDir(); + const store = createSkillUploadStore({ rootDir }); + const archive = Buffer.from("abc"); + const begin = await store.begin({ + kind: "skill-archive", + slug: "corrupt-skill", + sizeBytes: archive.length, + }); + await store.chunk({ + uploadId: begin.uploadId, + offset: 0, + dataBase64: archive.toString("base64"), + }); + await store.commit({ uploadId: begin.uploadId }); + const metadataPath = path.join(rootDir, begin.uploadId, "metadata.json"); + const metadata = JSON.parse(await fs.readFile(metadataPath, "utf8")) as Record; + delete 
metadata.actualSha256; + await fs.writeFile(metadataPath, `${JSON.stringify(metadata, null, 2)}\n`, "utf8"); + + await expectUploadError( + store.commit({ uploadId: begin.uploadId }), + "committed upload is missing sha256", + ); + }); + + it("limits active uploads", async () => { + const rootDir = await makeTempDir(); + const store = createSkillUploadStore({ rootDir }); + for (let i = 0; i < MAX_ACTIVE_SKILL_UPLOADS; i += 1) { + await store.begin({ + kind: "skill-archive", + slug: `active-${i}`, + sizeBytes: 1, + }); + } + + await expectUploadError( + store.begin({ + kind: "skill-archive", + slug: "too-many", + sizeBytes: 1, + }), + "too many active skill uploads", + ); + }); + + it("expires unfinished and committed uploads", async () => { + let now = 1000; + const rootDir = await makeTempDir(); + const store = createSkillUploadStore({ + rootDir, + ttlMs: 10, + now: () => now, + }); + const archive = Buffer.from("abc"); + const begin = await store.begin({ + kind: "skill-archive", + slug: "demo-skill", + sizeBytes: archive.length, + }); + + now = 1011; + await expectUploadError( + store.chunk({ + uploadId: begin.uploadId, + offset: 0, + dataBase64: archive.toString("base64"), + }), + "upload has expired", + ); + + now = 2000; + const committed = await store.begin({ + kind: "skill-archive", + slug: "committed-skill", + sizeBytes: archive.length, + }); + await store.chunk({ + uploadId: committed.uploadId, + offset: 0, + dataBase64: archive.toString("base64"), + }); + await store.commit({ uploadId: committed.uploadId }); + now = 2011; + await expectUploadError( + store.withCommittedUpload(committed.uploadId, async (record) => record), + "upload has expired", + ); + }); + + it("does not sweep committed uploads while an install holds the upload lock", async () => { + let now = 1000; + const rootDir = await makeTempDir(); + const store = createSkillUploadStore({ + rootDir, + ttlMs: 10, + now: () => now, + }); + const archive = Buffer.from("abc"); + const committed = await 
store.begin({ + kind: "skill-archive", + slug: "pinned-skill", + sizeBytes: archive.length, + }); + await store.chunk({ + uploadId: committed.uploadId, + offset: 0, + dataBase64: archive.toString("base64"), + }); + await store.commit({ uploadId: committed.uploadId }); + + const entered = deferred(); + const release = deferred(); + const pinned = store.withCommittedUpload(committed.uploadId, async () => { + entered.resolve(); + await release.promise; + return true; + }); + await entered.promise; + + now = 1011; + const sweep = store.begin({ + kind: "skill-archive", + slug: "sweep-trigger", + sizeBytes: 1, + }); + await new Promise((resolve) => setImmediate(resolve)); + await expect(fs.stat(path.join(rootDir, committed.uploadId))).resolves.toBeTruthy(); + + release.resolve(); + await expect(pinned).resolves.toBe(true); + await sweep; + await expect(fs.stat(path.join(rootDir, committed.uploadId))).rejects.toMatchObject({ + code: "ENOENT", + }); + }); + + it("does not remove expired idempotent uploads while an install holds the upload lock", async () => { + let now = 1000; + const rootDir = await makeTempDir(); + const store = createSkillUploadStore({ + rootDir, + ttlMs: 10, + now: () => now, + }); + const archive = Buffer.from("abc"); + const committed = await store.begin({ + kind: "skill-archive", + slug: "idempotent-skill", + sizeBytes: archive.length, + idempotencyKey: "same-upload", + }); + await store.chunk({ + uploadId: committed.uploadId, + offset: 0, + dataBase64: archive.toString("base64"), + }); + await store.commit({ uploadId: committed.uploadId }); + + const entered = deferred(); + const release = deferred(); + const pinned = store.withCommittedUpload(committed.uploadId, async () => { + entered.resolve(); + await release.promise; + return true; + }); + await entered.promise; + + now = 1011; + const repeated = store.begin({ + kind: "skill-archive", + slug: "idempotent-skill", + sizeBytes: archive.length, + idempotencyKey: "same-upload", + }); + await new 
Promise((resolve) => setImmediate(resolve)); + await expect(fs.stat(path.join(rootDir, committed.uploadId))).resolves.toBeTruthy(); + + release.resolve(); + await expect(pinned).resolves.toBe(true); + const next = await repeated; + expect(next.uploadId).not.toBe(committed.uploadId); + await expect(fs.stat(path.join(rootDir, committed.uploadId))).rejects.toMatchObject({ + code: "ENOENT", + }); + }); +}); diff --git a/src/gateway/server-methods/skills-upload-store.ts b/src/gateway/server-methods/skills-upload-store.ts new file mode 100644 index 00000000000..72235e4bed1 --- /dev/null +++ b/src/gateway/server-methods/skills-upload-store.ts @@ -0,0 +1,592 @@ +import { createHash, randomUUID } from "node:crypto"; +import { createReadStream } from "node:fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { validateRequestedSkillSlug } from "../../agents/skills-archive-install.js"; +import { resolveStateDir } from "../../config/paths.js"; +import { DEFAULT_MAX_ARCHIVE_BYTES_ZIP } from "../../infra/archive.js"; +import { formatErrorMessage } from "../../infra/errors.js"; +import { createAsyncLock, readDurableJsonFile, writeJsonAtomic } from "../../infra/json-files.js"; + +export const SKILL_UPLOAD_TTL_MS = 60 * 60 * 1000; +export const MAX_SKILL_UPLOAD_CHUNK_BYTES = 4 * 1024 * 1024; +export const MAX_SKILL_UPLOAD_BASE64_LENGTH = Math.ceil(MAX_SKILL_UPLOAD_CHUNK_BYTES / 3) * 4; +export const MAX_ACTIVE_SKILL_UPLOADS = 32; +export const SKILL_UPLOAD_IDEMPOTENCY_KEY_MAX_LENGTH = 2048; + +const SHA256_PATTERN = /^[a-f0-9]{64}$/i; +const UPLOAD_ID_PATTERN = + /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i; +const BASE64_PATTERN = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/; +const locks = new Map; references: number }>(); + +export class SkillUploadRequestError extends Error { + constructor(message: string) { + super(message); + this.name = "SkillUploadRequestError"; + } +} + +export type 
SkillUploadRecord = { + version: 1; + kind: "skill-archive"; + uploadId: string; + slug: string; + force: boolean; + sizeBytes: number; + sha256?: string; + actualSha256?: string; + receivedBytes: number; + archivePath: string; + createdAt: number; + expiresAt: number; + committed: boolean; + committedAt?: number; + idempotencyKeyHash?: string; +}; + +export type SkillUploadStore = ReturnType; + +type BeginParams = { + kind: "skill-archive"; + slug: string; + sizeBytes: number; + sha256?: string; + force?: boolean; + idempotencyKey?: string; +}; + +type ChunkParams = { + uploadId: string; + offset: number; + dataBase64: string; +}; + +type CommitParams = { + uploadId: string; + sha256?: string; +}; + +type IdempotencyRecord = { + version: 1; + keyHash: string; + uploadId: string; + kind: "skill-archive"; + slug: string; + force: boolean; + sizeBytes: number; + sha256?: string; +}; + +async function withLock(key: string, fn: () => Promise): Promise { + let entry = locks.get(key); + if (!entry) { + entry = { lock: createAsyncLock(), references: 0 }; + locks.set(key, entry); + } + entry.references += 1; + try { + return await entry.lock(fn); + } finally { + entry.references -= 1; + if (entry.references === 0) { + locks.delete(key); + } + } +} + +export function normalizeSkillUploadSha256(value: string | undefined): string | undefined { + if (value === undefined) { + return undefined; + } + const normalized = value.trim().toLowerCase(); + if (!SHA256_PATTERN.test(normalized)) { + throw new SkillUploadRequestError("invalid sha256"); + } + return normalized; +} + +function validateUploadId(uploadId: string): string { + const normalized = uploadId.trim(); + if (!UPLOAD_ID_PATTERN.test(normalized)) { + throw new SkillUploadRequestError("invalid uploadId"); + } + return normalized; +} + +function isUploadId(value: string): boolean { + return UPLOAD_ID_PATTERN.test(value); +} + +function validateSizeBytes(sizeBytes: number): number { + if (!Number.isSafeInteger(sizeBytes) || 
sizeBytes < 1) { + throw new SkillUploadRequestError("invalid sizeBytes"); + } + if (sizeBytes > DEFAULT_MAX_ARCHIVE_BYTES_ZIP) { + throw new SkillUploadRequestError("skill archive exceeds maximum upload size"); + } + return sizeBytes; +} + +function validateUploadSlug(slug: string): string { + try { + return validateRequestedSkillSlug(slug); + } catch (err) { + throw new SkillUploadRequestError(formatErrorMessage(err)); + } +} + +function validateOffset(offset: number): number { + if (!Number.isSafeInteger(offset) || offset < 0) { + throw new SkillUploadRequestError("invalid offset"); + } + return offset; +} + +function validateIdempotencyKey(value: string | undefined): string | undefined { + const normalized = value?.trim(); + if (!normalized) { + return undefined; + } + if (normalized.length > SKILL_UPLOAD_IDEMPOTENCY_KEY_MAX_LENGTH) { + throw new SkillUploadRequestError("idempotencyKey is too long"); + } + return normalized; +} + +function hashText(value: string): string { + return createHash("sha256").update(value).digest("hex"); +} + +function resolveUploadsRoot(rootDir?: string): string { + return path.resolve(rootDir ?? path.join(resolveStateDir(), "tmp", "skill-uploads")); +} + +function resolveUploadDir(rootDir: string, uploadId: string): string { + return path.join(rootDir, validateUploadId(uploadId)); +} + +function resolveMetadataPath(rootDir: string, uploadId: string): string { + return path.join(resolveUploadDir(rootDir, uploadId), "metadata.json"); +} + +function resolveArchivePath(rootDir: string, uploadId: string): string { + return path.join(resolveUploadDir(rootDir, uploadId), "archive.zip"); +} + +function resolveIdempotencyPath(rootDir: string, keyHash: string): string { + return path.join(rootDir, "idempotency", `${keyHash}.json`); +} + +function estimateBase64DecodedBytes(value: string): number { + const padding = value.endsWith("==") ? 2 : value.endsWith("=") ? 
1 : 0; + return (value.length / 4) * 3 - padding; +} + +function decodeBase64Chunk(dataBase64: string): Buffer { + const normalized = dataBase64.trim(); + if (!normalized || normalized.length % 4 !== 0 || !BASE64_PATTERN.test(normalized)) { + throw new SkillUploadRequestError("invalid dataBase64"); + } + if (normalized.length > MAX_SKILL_UPLOAD_BASE64_LENGTH) { + throw new SkillUploadRequestError("upload chunk exceeds maximum size"); + } + if (estimateBase64DecodedBytes(normalized) > MAX_SKILL_UPLOAD_CHUNK_BYTES) { + throw new SkillUploadRequestError("upload chunk exceeds maximum size"); + } + const decoded = Buffer.from(normalized, "base64"); + if (decoded.length < 1) { + throw new SkillUploadRequestError("empty upload chunk"); + } + if (decoded.length > MAX_SKILL_UPLOAD_CHUNK_BYTES) { + throw new SkillUploadRequestError("upload chunk exceeds maximum size"); + } + return decoded; +} + +async function assertNotExpired( + rootDir: string, + record: SkillUploadRecord, + now: number, +): Promise { + if (record.expiresAt <= now) { + await removeRecordFiles(rootDir, record); + throw new SkillUploadRequestError("upload has expired"); + } +} + +async function computeFileSha256(filePath: string): Promise { + const digest = createHash("sha256"); + for await (const chunk of createReadStream(filePath)) { + digest.update(chunk); + } + return digest.digest("hex"); +} + +async function readRecord(rootDir: string, uploadId: string): Promise { + const record = await readDurableJsonFile( + resolveMetadataPath(rootDir, uploadId), + ); + if (!record || record.version !== 1 || record.uploadId !== uploadId) { + throw new SkillUploadRequestError(`upload not found: ${uploadId}`); + } + return { ...record, archivePath: resolveArchivePath(rootDir, uploadId) }; +} + +async function readRecordIfPresent( + rootDir: string, + uploadId: string, +): Promise { + const record = await readDurableJsonFile( + resolveMetadataPath(rootDir, uploadId), + ); + if (!record || record.version !== 1 || 
record.uploadId !== uploadId) { + return null; + } + return { + ...record, + archivePath: resolveArchivePath(rootDir, uploadId), + }; +} + +async function writeRecord(rootDir: string, record: SkillUploadRecord): Promise { + await writeJsonAtomic(resolveMetadataPath(rootDir, record.uploadId), record, { + mode: 0o600, + dirMode: 0o700, + trailingNewline: true, + }); +} + +async function removeUploadDir(rootDir: string, uploadId: string): Promise { + await fs.rm(resolveUploadDir(rootDir, uploadId), { recursive: true, force: true }); +} + +async function removeRecordFiles(rootDir: string, record: SkillUploadRecord): Promise { + await removeUploadDir(rootDir, record.uploadId); + if (record.idempotencyKeyHash) { + await fs.rm(resolveIdempotencyPath(rootDir, record.idempotencyKeyHash), { force: true }); + } +} + +async function listUploadIds(rootDir: string): Promise { + const entries = await fs.readdir(rootDir, { withFileTypes: true }).catch(() => []); + return entries + .filter((entry) => entry.isDirectory() && isUploadId(entry.name)) + .map((entry) => entry.name); +} + +async function cleanupExpiredUploads( + rootDir: string, + nowMs: number, + excludeUploadId?: string, +): Promise { + for (const uploadId of await listUploadIds(rootDir)) { + if (uploadId === excludeUploadId) { + continue; + } + await withLock(`${rootDir}:upload:${uploadId}`, async () => { + const record = await readRecordIfPresent(rootDir, uploadId).catch(() => null); + if (record && record.expiresAt <= nowMs) { + await removeRecordFiles(rootDir, record); + } + }); + } +} + +async function countActiveUploads(rootDir: string, nowMs: number): Promise { + let count = 0; + for (const uploadId of await listUploadIds(rootDir)) { + const record = await readRecordIfPresent(rootDir, uploadId).catch(() => null); + if (record && record.expiresAt > nowMs) { + count += 1; + } + } + return count; +} + +async function writeArchiveChunk(params: { + archivePath: string; + offset: number; + decoded: Buffer; + afterSync: 
() => Promise; +}): Promise { + const handle = await fs.open(params.archivePath, "r+"); + try { + await handle.truncate(params.offset); + let written = 0; + while (written < params.decoded.length) { + const result = await handle.write( + params.decoded, + written, + params.decoded.length - written, + params.offset + written, + ); + if (result.bytesWritten <= 0) { + throw new Error("failed to write upload chunk"); + } + written += result.bytesWritten; + } + await handle.sync(); + await params.afterSync(); + } finally { + await handle.close().catch(() => undefined); + } +} + +async function readCommittedRecord( + rootDir: string, + uploadId: string, + nowMs: number, +): Promise { + const record = await readRecord(rootDir, uploadId); + await assertNotExpired(rootDir, record, nowMs); + if (!record.committed) { + throw new SkillUploadRequestError("upload is not committed"); + } + if (!record.actualSha256) { + throw new SkillUploadRequestError("committed upload is missing sha256"); + } + const stat = await fs.stat(record.archivePath).catch(() => null); + if (!stat || stat.size !== record.sizeBytes) { + throw new SkillUploadRequestError("uploaded archive is missing or incomplete"); + } + return record; +} + +export function createSkillUploadStore(options?: { + rootDir?: string; + now?: () => number; + ttlMs?: number; +}) { + const rootDir = resolveUploadsRoot(options?.rootDir); + const now = options?.now ?? Date.now; + const ttlMs = options?.ttlMs ?? 
SKILL_UPLOAD_TTL_MS; + + return { + rootDir, + async begin(params: BeginParams) { + return await withLock(`${rootDir}:begin`, async () => { + await cleanupExpiredUploads(rootDir, now()); + if (params.kind !== "skill-archive") { + throw new SkillUploadRequestError("unsupported upload kind"); + } + const slug = validateUploadSlug(params.slug); + const sizeBytes = validateSizeBytes(params.sizeBytes); + const sha256 = normalizeSkillUploadSha256(params.sha256); + const force = params.force === true; + const idempotencyKey = validateIdempotencyKey(params.idempotencyKey); + const keyHash = idempotencyKey ? hashText(idempotencyKey) : undefined; + if (keyHash) { + const existing = await readDurableJsonFile( + resolveIdempotencyPath(rootDir, keyHash), + ); + if (existing) { + if ( + existing.kind !== params.kind || + existing.slug !== slug || + existing.force !== force || + existing.sizeBytes !== sizeBytes || + existing.sha256 !== sha256 + ) { + throw new SkillUploadRequestError("idempotencyKey conflicts with a different upload"); + } + const existingUploadId = validateUploadId(existing.uploadId); + const activeExisting = await withLock( + `${rootDir}:upload:${existingUploadId}`, + async () => { + const record = await readRecordIfPresent(rootDir, existingUploadId); + if (record && record.expiresAt > now()) { + return { + uploadId: record.uploadId, + receivedBytes: record.receivedBytes, + expiresAt: record.expiresAt, + }; + } + if (record) { + await removeRecordFiles(rootDir, record); + } else { + await removeUploadDir(rootDir, existingUploadId); + } + return null; + }, + ); + if (activeExisting) { + return activeExisting; + } + } + } + + if ((await countActiveUploads(rootDir, now())) >= MAX_ACTIVE_SKILL_UPLOADS) { + throw new SkillUploadRequestError("too many active skill uploads"); + } + + const uploadId = randomUUID(); + const uploadDir = resolveUploadDir(rootDir, uploadId); + const archivePath = resolveArchivePath(rootDir, uploadId); + const createdAt = now(); + const 
record: SkillUploadRecord = { + version: 1, + kind: params.kind, + uploadId, + slug, + force, + sizeBytes, + ...(sha256 ? { sha256 } : {}), + receivedBytes: 0, + archivePath, + createdAt, + expiresAt: createdAt + ttlMs, + committed: false, + ...(keyHash ? { idempotencyKeyHash: keyHash } : {}), + }; + + await fs.mkdir(uploadDir, { recursive: true, mode: 0o700 }); + await fs.writeFile(archivePath, Buffer.alloc(0), { mode: 0o600 }); + await writeRecord(rootDir, record); + if (keyHash) { + const idem: IdempotencyRecord = { + version: 1, + keyHash, + uploadId, + kind: params.kind, + slug, + force, + sizeBytes, + ...(sha256 ? { sha256 } : {}), + }; + await writeJsonAtomic(resolveIdempotencyPath(rootDir, keyHash), idem, { + mode: 0o600, + dirMode: 0o700, + trailingNewline: true, + }); + } + return { + uploadId, + receivedBytes: 0, + expiresAt: record.expiresAt, + }; + }); + }, + async chunk(params: ChunkParams) { + const uploadId = validateUploadId(params.uploadId); + const offset = validateOffset(params.offset); + const decoded = decodeBase64Chunk(params.dataBase64); + await cleanupExpiredUploads(rootDir, now(), uploadId); + return await withLock(`${rootDir}:upload:${uploadId}`, async () => { + const record = await readRecord(rootDir, uploadId); + await assertNotExpired(rootDir, record, now()); + if (record.committed) { + throw new SkillUploadRequestError("upload is already committed"); + } + if (offset !== record.receivedBytes) { + throw new SkillUploadRequestError( + `upload offset mismatch: expected ${record.receivedBytes}, got ${offset}`, + ); + } + const nextSize = record.receivedBytes + decoded.length; + if (nextSize > record.sizeBytes) { + throw new SkillUploadRequestError("upload chunk exceeds declared size"); + } + const nextRecord = { + ...record, + receivedBytes: nextSize, + }; + await writeArchiveChunk({ + archivePath: record.archivePath, + offset: record.receivedBytes, + decoded, + afterSync: async () => { + await writeRecord(rootDir, nextRecord); + }, + }); 
+ return { + uploadId, + receivedBytes: nextRecord.receivedBytes, + expiresAt: nextRecord.expiresAt, + }; + }); + }, + async commit(params: CommitParams) { + const uploadId = validateUploadId(params.uploadId); + const requestedSha = normalizeSkillUploadSha256(params.sha256); + return await withLock(`${rootDir}:upload:${uploadId}`, async () => { + const record = await readRecord(rootDir, uploadId); + await assertNotExpired(rootDir, record, now()); + if (record.committed) { + if (!record.actualSha256) { + throw new SkillUploadRequestError("committed upload is missing sha256"); + } + if (requestedSha && requestedSha !== record.actualSha256) { + throw new SkillUploadRequestError("upload sha256 mismatch"); + } + return { + uploadId, + receivedBytes: record.receivedBytes, + sha256: record.actualSha256, + expiresAt: record.expiresAt, + }; + } + if (record.receivedBytes !== record.sizeBytes) { + throw new SkillUploadRequestError( + `upload size mismatch: expected ${record.sizeBytes}, got ${record.receivedBytes}`, + ); + } + const stat = await fs.stat(record.archivePath).catch(() => null); + if (!stat || stat.size !== record.sizeBytes) { + throw new SkillUploadRequestError("uploaded archive is missing or incomplete"); + } + if (record.sha256 && requestedSha && record.sha256 !== requestedSha) { + throw new SkillUploadRequestError("upload sha256 does not match begin sha256"); + } + const actualSha256 = await computeFileSha256(record.archivePath); + const expectedSha = requestedSha ?? record.sha256; + if (expectedSha && expectedSha !== actualSha256) { + throw new SkillUploadRequestError("upload sha256 mismatch"); + } + const nextRecord = { + ...record, + sha256: record.sha256 ?? requestedSha ?? 
actualSha256, + actualSha256, + committed: true, + committedAt: now(), + }; + await writeRecord(rootDir, nextRecord); + return { + uploadId, + receivedBytes: nextRecord.receivedBytes, + sha256: actualSha256, + expiresAt: nextRecord.expiresAt, + }; + }); + }, + async withCommittedUpload( + uploadIdRaw: string, + action: (record: SkillUploadRecord, controls: { remove: () => Promise }) => Promise, + ): Promise { + const uploadId = validateUploadId(uploadIdRaw); + return await withLock(`${rootDir}:upload:${uploadId}`, async () => { + const record = await readCommittedRecord(rootDir, uploadId, now()); + return await action(record, { + remove: async () => { + await removeRecordFiles(rootDir, record); + }, + }); + }); + }, + async remove(uploadIdRaw: string): Promise { + const uploadId = validateUploadId(uploadIdRaw); + await withLock(`${rootDir}:upload:${uploadId}`, async () => { + const record = await readDurableJsonFile( + resolveMetadataPath(rootDir, uploadId), + ); + if (record && record.version === 1 && record.uploadId === uploadId) { + await removeRecordFiles(rootDir, record); + } else { + await removeUploadDir(rootDir, uploadId); + } + }); + }, + }; +} + +export const defaultSkillUploadStore = createSkillUploadStore(); diff --git a/src/gateway/server-methods/skills-upload.test.ts b/src/gateway/server-methods/skills-upload.test.ts new file mode 100644 index 00000000000..675f99441a9 --- /dev/null +++ b/src/gateway/server-methods/skills-upload.test.ts @@ -0,0 +1,609 @@ +import { createHash, randomUUID } from "node:crypto"; +import fs from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import JSZip from "jszip"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import type { GatewayRequestHandlers } from "./types.js"; + +const agentScopeState = vi.hoisted(() => ({ + workspaceDir: "", +})); + +const installSecurityScanState = vi.hoisted(() => ({ + scanSkillInstallSource: vi.fn(), +})); + +const replaceFileState 
= vi.hoisted(() => ({ + publishFailureTarget: "", + publishFailures: 0, +})); + +vi.mock("../../agents/agent-scope.js", async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + listAgentIds: vi.fn(() => ["main"]), + resolveAgentWorkspaceDir: vi.fn(() => agentScopeState.workspaceDir), + resolveDefaultAgentId: vi.fn(() => "main"), + }; +}); + +vi.mock("../../plugins/install-security-scan.js", () => ({ + scanSkillInstallSource: installSecurityScanState.scanSkillInstallSource, +})); + +vi.mock("../../infra/replace-file.js", async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + movePathWithCopyFallback: async ( + options: Parameters[0], + ) => { + if ( + replaceFileState.publishFailures === 0 && + replaceFileState.publishFailureTarget && + options.from.includes(".openclaw-install-stage-") && + options.to === replaceFileState.publishFailureTarget + ) { + replaceFileState.publishFailures += 1; + throw new Error("publish boom"); + } + return await actual.movePathWithCopyFallback(options); + }, + }; +}); + +let tempDirs: string[] = []; + +type CallResult = { + ok: boolean; + payload?: unknown; + error?: { code?: string; message?: string }; +}; + +async function makeHarness(): Promise<{ + handlers: GatewayRequestHandlers; + stateDir: string; + workspaceDir: string; +}> { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-skill-upload-handler-")); + tempDirs.push(root); + const stateDir = path.join(root, "state"); + const workspaceDir = path.join(root, "workspace"); + await fs.mkdir(workspaceDir, { recursive: true }); + vi.stubEnv("OPENCLAW_STATE_DIR", stateDir); + agentScopeState.workspaceDir = workspaceDir; + vi.resetModules(); + const { skillsHandlers } = await import("./skills.js"); + return { handlers: skillsHandlers, stateDir, workspaceDir }; +} + +function makeContext( + config: Record = { + skills: { install: { allowUploadedArchives: true } }, + }, +) { + return { + 
getRuntimeConfig: () => config, + logGateway: { + debug: vi.fn(), + error: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + }, + }; +} + +async function call( + handlers: GatewayRequestHandlers, + method: string, + params: Record, + options: { config?: Record } = {}, +): Promise { + const handler = handlers[method]; + if (!handler) { + throw new Error(`missing handler: ${method}`); + } + let result: CallResult | undefined; + await handler({ + params, + req: { method } as never, + client: null, + isWebchatConnect: () => false, + context: makeContext(options.config) as never, + respond: (ok, payload, error) => { + result = { ok, payload, error }; + }, + }); + if (!result) { + throw new Error(`handler did not respond: ${method}`); + } + return result; +} + +function sha256(bytes: Buffer): string { + return createHash("sha256").update(bytes).digest("hex"); +} + +async function makeSkillArchive(params: { + name?: string; + description?: string; + body?: string; + rootDir?: string; + skillFileName?: string; + traversal?: boolean; + missingSkill?: boolean; +}): Promise { + const zip = new JSZip(); + const prefix = params.rootDir ? `${params.rootDir.replace(/\/+$/, "")}/` : ""; + if (params.missingSkill) { + zip.file(`${prefix}README.md`, "not a skill"); + } else { + zip.file( + `${prefix}${params.skillFileName ?? "SKILL.md"}`, + [ + "---", + `name: ${params.name ?? "Uploaded Demo"}`, + `description: ${params.description ?? "Installed from upload"}`, + "---", + "", + params.body ?? 
"# Uploaded demo", + "", + ].join("\n"), + ); + } + if (params.traversal) { + zip.file("../evil.txt", "owned"); + } + return Buffer.from(await zip.generateAsync({ type: "nodebuffer" })); +} + +async function uploadArchive( + handlers: GatewayRequestHandlers, + params: { + archive: Buffer; + slug: string; + force?: boolean; + }, +): Promise<{ uploadId: string; sha256: string }> { + const digest = sha256(params.archive); + const begin = await call(handlers, "skills.upload.begin", { + kind: "skill-archive", + slug: params.slug, + sizeBytes: params.archive.length, + sha256: digest, + force: params.force, + }); + expect(begin.ok).toBe(true); + const uploadId = (begin.payload as { uploadId: string }).uploadId; + const chunk = await call(handlers, "skills.upload.chunk", { + uploadId, + offset: 0, + dataBase64: params.archive.toString("base64"), + }); + expect(chunk.ok).toBe(true); + const commit = await call(handlers, "skills.upload.commit", { + uploadId, + sha256: digest, + }); + expect(commit.ok).toBe(true); + return { uploadId, sha256: digest }; +} + +describe("skill upload gateway handlers", () => { + beforeEach(() => { + tempDirs = []; + vi.unstubAllEnvs(); + replaceFileState.publishFailureTarget = ""; + replaceFileState.publishFailures = 0; + installSecurityScanState.scanSkillInstallSource.mockReset(); + installSecurityScanState.scanSkillInstallSource.mockResolvedValue(undefined); + }); + + afterEach(async () => { + vi.unstubAllEnvs(); + vi.restoreAllMocks(); + await Promise.all( + tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true })), + ); + }); + + it("rejects upload archive RPCs and upload installs when disabled by config", async () => { + const { handlers, stateDir } = await makeHarness(); + const config = { skills: { install: { allowUploadedArchives: false } } }; + const archive = await makeSkillArchive({}); + const begin = await call( + handlers, + "skills.upload.begin", + { + kind: "skill-archive", + slug: "disabled-skill", + 
sizeBytes: archive.length, + }, + { config }, + ); + + expect(begin.ok).toBe(false); + expect(begin.error?.code).toBe("UNAVAILABLE"); + expect(begin.error?.message).toContain("skills.install.allowUploadedArchives"); + await expect(fs.stat(path.join(stateDir, "tmp", "skill-uploads"))).rejects.toMatchObject({ + code: "ENOENT", + }); + + const install = await call( + handlers, + "skills.install", + { + source: "upload", + uploadId: randomUUID(), + slug: "disabled-skill", + }, + { config }, + ); + expect(install.ok).toBe(false); + expect(install.error?.code).toBe("UNAVAILABLE"); + expect(install.error?.message).toContain("skills.install.allowUploadedArchives"); + }); + + it("uploads, installs, cleans up, and reports the skill from status", async () => { + const { handlers, stateDir, workspaceDir } = await makeHarness(); + const archive = await makeSkillArchive({ + name: "Uploaded Demo", + rootDir: "archive-internal-name", + }); + const { uploadId, sha256: digest } = await uploadArchive(handlers, { + archive, + slug: "uploaded-demo", + }); + + const install = await call(handlers, "skills.install", { + source: "upload", + uploadId, + slug: "uploaded-demo", + sha256: digest, + }); + + expect(install.ok).toBe(true); + expect(install.payload).toMatchObject({ + ok: true, + slug: "uploaded-demo", + sha256: digest, + }); + await expect( + fs.readFile(path.join(workspaceDir, "skills", "uploaded-demo", "SKILL.md"), "utf8"), + ).resolves.toContain("Uploaded Demo"); + await expect( + fs.stat(path.join(workspaceDir, "skills", "archive-internal-name")), + ).rejects.toMatchObject({ code: "ENOENT" }); + await expect( + fs.stat(path.join(stateDir, "tmp", "skill-uploads", uploadId)), + ).rejects.toMatchObject({ code: "ENOENT" }); + + const status = await call(handlers, "skills.status", {}); + expect(status.ok).toBe(true); + expect(JSON.stringify(status.payload)).toContain("Uploaded Demo"); + }); + + it("rejects install before commit and missing upload ids", async () => { + const { 
handlers } = await makeHarness(); + const archive = await makeSkillArchive({}); + const begin = await call(handlers, "skills.upload.begin", { + kind: "skill-archive", + slug: "pending-skill", + sizeBytes: archive.length, + }); + const uploadId = (begin.payload as { uploadId: string }).uploadId; + + const pending = await call(handlers, "skills.install", { + source: "upload", + uploadId, + slug: "pending-skill", + }); + expect(pending.ok).toBe(false); + expect(pending.error?.message).toContain("upload is not committed"); + + const missing = await call(handlers, "skills.install", { + source: "upload", + uploadId: randomUUID(), + slug: "missing-skill", + }); + expect(missing.ok).toBe(false); + expect(missing.error?.message).toContain("upload not found"); + }); + + it("binds slug and force to begin parameters", async () => { + const { handlers } = await makeHarness(); + const archive = await makeSkillArchive({}); + const first = await uploadArchive(handlers, { + archive, + slug: "bound-skill", + }); + + const slugSwitch = await call(handlers, "skills.install", { + source: "upload", + uploadId: first.uploadId, + slug: "other-skill", + }); + expect(slugSwitch.ok).toBe(false); + expect(slugSwitch.error?.message).toContain("install slug does not match upload slug"); + + const second = await uploadArchive(handlers, { + archive, + slug: "forced-skill", + force: true, + }); + const forceSwitch = await call(handlers, "skills.install", { + source: "upload", + uploadId: second.uploadId, + slug: "forced-skill", + }); + expect(forceSwitch.ok).toBe(false); + expect(forceSwitch.error?.message).toContain("install force does not match upload force"); + }); + + it("rejects install sha mismatch and removes the terminal upload", async () => { + const { handlers, stateDir } = await makeHarness(); + const upload = await uploadArchive(handlers, { + archive: await makeSkillArchive({}), + slug: "sha-bound-skill", + }); + + const install = await call(handlers, "skills.install", { + source: 
"upload", + uploadId: upload.uploadId, + slug: "sha-bound-skill", + sha256: "0".repeat(64), + }); + + expect(install.ok).toBe(false); + expect(install.error).toMatchObject({ + code: "INVALID_REQUEST", + message: "install sha256 does not match uploaded archive", + }); + await expect( + fs.stat(path.join(stateDir, "tmp", "skill-uploads", upload.uploadId)), + ).rejects.toMatchObject({ code: "ENOENT" }); + }); + + it("rejects expired committed uploads through skills.install", async () => { + const { handlers, stateDir } = await makeHarness(); + const upload = await uploadArchive(handlers, { + archive: await makeSkillArchive({}), + slug: "expired-skill", + }); + const metadataPath = path.join( + stateDir, + "tmp", + "skill-uploads", + upload.uploadId, + "metadata.json", + ); + const metadata = JSON.parse(await fs.readFile(metadataPath, "utf8")) as { expiresAt: number }; + metadata.expiresAt = Date.now() - 1; + await fs.writeFile(metadataPath, `${JSON.stringify(metadata, null, 2)}\n`, "utf8"); + + const install = await call(handlers, "skills.install", { + source: "upload", + uploadId: upload.uploadId, + slug: "expired-skill", + }); + + expect(install.ok).toBe(false); + expect(install.error).toMatchObject({ + code: "INVALID_REQUEST", + message: "upload has expired", + }); + await expect( + fs.stat(path.join(stateDir, "tmp", "skill-uploads", upload.uploadId)), + ).rejects.toMatchObject({ code: "ENOENT" }); + }); + + it("rejects invalid slugs, missing SKILL.md, and archive traversal", async () => { + const { handlers, stateDir, workspaceDir } = await makeHarness(); + const invalidSlug = await call(handlers, "skills.upload.begin", { + kind: "skill-archive", + slug: "../escape", + sizeBytes: 1, + }); + expect(invalidSlug.ok).toBe(false); + expect(invalidSlug.error?.message).toContain("Invalid skill slug"); + + const missingSkill = await uploadArchive(handlers, { + archive: await makeSkillArchive({ missingSkill: true }), + slug: "missing-skill-md", + }); + const missingInstall 
= await call(handlers, "skills.install", { + source: "upload", + uploadId: missingSkill.uploadId, + slug: "missing-skill-md", + }); + expect(missingInstall.ok).toBe(false); + expect(missingInstall.error?.code).toBe("INVALID_REQUEST"); + expect(missingInstall.error?.message).toContain("SKILL.md"); + await expect( + fs.stat(path.join(stateDir, "tmp", "skill-uploads", missingSkill.uploadId)), + ).rejects.toMatchObject({ code: "ENOENT" }); + + const legacyMarker = await uploadArchive(handlers, { + archive: await makeSkillArchive({ + rootDir: "legacy-root", + skillFileName: "skills.md", + }), + slug: "legacy-marker", + }); + const legacyMarkerInstall = await call(handlers, "skills.install", { + source: "upload", + uploadId: legacyMarker.uploadId, + slug: "legacy-marker", + }); + expect(legacyMarkerInstall.ok).toBe(false); + expect(legacyMarkerInstall.error?.code).toBe("INVALID_REQUEST"); + expect(legacyMarkerInstall.error?.message).toContain("SKILL.md"); + await expect( + fs.stat(path.join(stateDir, "tmp", "skill-uploads", legacyMarker.uploadId)), + ).rejects.toMatchObject({ code: "ENOENT" }); + + const traversal = await uploadArchive(handlers, { + archive: await makeSkillArchive({ traversal: true }), + slug: "traversal-skill", + }); + const traversalInstall = await call(handlers, "skills.install", { + source: "upload", + uploadId: traversal.uploadId, + slug: "traversal-skill", + }); + expect(traversalInstall.ok).toBe(false); + expect(traversalInstall.error?.code).toBe("INVALID_REQUEST"); + expect(traversalInstall.error?.message).toMatch( + /escapes destination|absolute|extract archive/i, + ); + await expect( + fs.stat(path.join(workspaceDir, "skills", "traversal-skill")), + ).rejects.toMatchObject({ code: "ENOENT" }); + }); + + it("treats security scan blocks as terminal invalid uploads", async () => { + const { handlers, stateDir } = await makeHarness(); + installSecurityScanState.scanSkillInstallSource.mockResolvedValueOnce({ + blocked: { + code: 
"security_scan_blocked", + reason: + 'Skill "scan-blocked" installation blocked: blocked dependencies "plain-crypto-js" declared in package.json.', + }, + }); + const upload = await uploadArchive(handlers, { + archive: await makeSkillArchive({}), + slug: "scan-blocked", + }); + + const install = await call(handlers, "skills.install", { + source: "upload", + uploadId: upload.uploadId, + slug: "scan-blocked", + }); + + expect(install.ok).toBe(false); + expect(install.error).toMatchObject({ + code: "INVALID_REQUEST", + message: expect.stringContaining("blocked dependencies"), + }); + expect(installSecurityScanState.scanSkillInstallSource).toHaveBeenCalledWith( + expect.objectContaining({ + origin: "skill-upload", + skillName: "scan-blocked", + }), + ); + await expect( + fs.stat(path.join(stateDir, "tmp", "skill-uploads", upload.uploadId)), + ).rejects.toMatchObject({ code: "ENOENT" }); + }); + + it("preserves existing installs unless force was bound at begin", async () => { + const { handlers, stateDir, workspaceDir } = await makeHarness(); + const first = await uploadArchive(handlers, { + archive: await makeSkillArchive({ + name: "Replace Demo", + body: "first version", + }), + slug: "replace-demo", + }); + expect( + ( + await call(handlers, "skills.install", { + source: "upload", + uploadId: first.uploadId, + slug: "replace-demo", + }) + ).ok, + ).toBe(true); + + const blocked = await uploadArchive(handlers, { + archive: await makeSkillArchive({ + name: "Replace Demo", + body: "second version", + }), + slug: "replace-demo", + }); + const blockedInstall = await call(handlers, "skills.install", { + source: "upload", + uploadId: blocked.uploadId, + slug: "replace-demo", + }); + expect(blockedInstall.ok).toBe(false); + expect(blockedInstall.error?.code).toBe("INVALID_REQUEST"); + expect(blockedInstall.error?.message).toContain("already exists"); + await expect( + fs.stat(path.join(stateDir, "tmp", "skill-uploads", blocked.uploadId)), + ).rejects.toMatchObject({ code: 
"ENOENT" }); + + const forced = await uploadArchive(handlers, { + archive: await makeSkillArchive({ + name: "Replace Demo", + body: "second version", + }), + slug: "replace-demo", + force: true, + }); + const forcedInstall = await call(handlers, "skills.install", { + source: "upload", + uploadId: forced.uploadId, + slug: "replace-demo", + force: true, + }); + expect(forcedInstall.ok).toBe(true); + await expect( + fs.readFile(path.join(workspaceDir, "skills", "replace-demo", "SKILL.md"), "utf8"), + ).resolves.toContain("second version"); + }); + + it("keeps the previous skill when force replacement publish fails", async () => { + const { handlers, stateDir, workspaceDir } = await makeHarness(); + const first = await uploadArchive(handlers, { + archive: await makeSkillArchive({ + name: "Rollback Demo", + body: "first version", + }), + slug: "rollback-demo", + }); + expect( + ( + await call(handlers, "skills.install", { + source: "upload", + uploadId: first.uploadId, + slug: "rollback-demo", + }) + ).ok, + ).toBe(true); + replaceFileState.publishFailureTarget = path.join( + await fs.realpath(path.join(workspaceDir, "skills")), + "rollback-demo", + ); + + const forced = await uploadArchive(handlers, { + archive: await makeSkillArchive({ + name: "Rollback Demo", + body: "second version", + }), + slug: "rollback-demo", + force: true, + }); + + const install = await call(handlers, "skills.install", { + source: "upload", + uploadId: forced.uploadId, + slug: "rollback-demo", + force: true, + }); + + expect(install.ok).toBe(false); + expect(install.error?.code).toBe("UNAVAILABLE"); + expect(install.error?.message).toContain("publish boom"); + await expect( + fs.readFile(path.join(workspaceDir, "skills", "rollback-demo", "SKILL.md"), "utf8"), + ).resolves.toContain("first version"); + await expect( + fs.stat(path.join(stateDir, "tmp", "skill-uploads", forced.uploadId)), + ).resolves.toBeTruthy(); + }); +}); diff --git a/src/gateway/server-methods/skills-upload.ts 
b/src/gateway/server-methods/skills-upload.ts new file mode 100644 index 00000000000..c1d53b354ea --- /dev/null +++ b/src/gateway/server-methods/skills-upload.ts @@ -0,0 +1,216 @@ +import type { ValidateFunction } from "ajv"; +import { + installSkillArchiveFromPath, + type SkillArchiveInstallFailureKind, + validateRequestedSkillSlug, +} from "../../agents/skills-archive-install.js"; +import type { OpenClawConfig } from "../../config/types.openclaw.js"; +import { formatErrorMessage } from "../../infra/errors.js"; +import { + ErrorCodes, + errorShape, + formatValidationErrors, + validateSkillsUploadBeginParams, + validateSkillsUploadChunkParams, + validateSkillsUploadCommitParams, +} from "../protocol/index.js"; +import type { ErrorShape } from "../protocol/index.js"; +import { + defaultSkillUploadStore, + normalizeSkillUploadSha256, + SkillUploadRequestError, + type SkillUploadStore, +} from "./skills-upload-store.js"; +import type { GatewayRequestContext } from "./types.js"; +import type { GatewayRequestHandlers } from "./types.js"; + +type UploadInstallErrorCode = typeof ErrorCodes.INVALID_REQUEST | typeof ErrorCodes.UNAVAILABLE; + +const UPLOADED_SKILL_ARCHIVES_DISABLED_MESSAGE = + "Uploaded skill archive installs are disabled by skills.install.allowUploadedArchives"; + +export function areUploadedSkillArchivesEnabled(config: OpenClawConfig): boolean { + return config.skills?.install?.allowUploadedArchives === true; +} + +export type UploadedSkillInstallResult = + | { + ok: true; + message: string; + stdout: string; + stderr: string; + code: 0; + slug: string; + targetDir: string; + sha256: string; + } + | { + ok: false; + error: string; + errorCode: UploadInstallErrorCode; + }; + +function uploadErrorShape( + prefix: string, + errors: Parameters[0], +): ErrorShape { + return errorShape(ErrorCodes.INVALID_REQUEST, `${prefix}: ${formatValidationErrors(errors)}`); +} + +function mapUploadError(err: unknown): ErrorShape { + if (err instanceof 
SkillUploadRequestError) { + return errorShape(ErrorCodes.INVALID_REQUEST, err.message); + } + return errorShape(ErrorCodes.UNAVAILABLE, formatErrorMessage(err)); +} + +function uploadInstallFailureErrorCode( + failureKind: SkillArchiveInstallFailureKind, +): UploadInstallErrorCode { + return failureKind === "invalid-request" ? ErrorCodes.INVALID_REQUEST : ErrorCodes.UNAVAILABLE; +} + +export const skillsUploadHandlers: GatewayRequestHandlers = { + "skills.upload.begin": makeUploadHandler( + "skills.upload.begin", + validateSkillsUploadBeginParams, + (params) => defaultSkillUploadStore.begin(params), + ), + "skills.upload.chunk": makeUploadHandler( + "skills.upload.chunk", + validateSkillsUploadChunkParams, + (params) => defaultSkillUploadStore.chunk(params), + ), + "skills.upload.commit": makeUploadHandler( + "skills.upload.commit", + validateSkillsUploadCommitParams, + (params) => defaultSkillUploadStore.commit(params), + ), +}; + +function makeUploadHandler( + name: string, + validator: ValidateFunction

, + action: (params: P) => Promise, +): GatewayRequestHandlers[string] { + return async ({ params, respond, context }) => { + if (!areUploadedSkillArchivesEnabled(context.getRuntimeConfig())) { + respond( + false, + undefined, + errorShape(ErrorCodes.UNAVAILABLE, UPLOADED_SKILL_ARCHIVES_DISABLED_MESSAGE), + ); + return; + } + if (!validator(params)) { + respond(false, undefined, uploadErrorShape(`invalid ${name} params`, validator.errors)); + return; + } + try { + respond(true, await action(params), undefined); + } catch (err) { + respond(false, undefined, mapUploadError(err)); + } + }; +} + +export async function installUploadedSkillArchive(params: { + uploadId: string; + slug: string; + force: boolean; + sha256?: string; + timeoutMs?: number; + workspaceDir: string; + context: GatewayRequestContext; + store?: SkillUploadStore; +}): Promise { + const store = params.store ?? defaultSkillUploadStore; + if (!areUploadedSkillArchivesEnabled(params.context.getRuntimeConfig())) { + return { + ok: false, + error: UPLOADED_SKILL_ARCHIVES_DISABLED_MESSAGE, + errorCode: ErrorCodes.UNAVAILABLE, + }; + } + try { + const requestedSlug = validateRequestedSkillSlug(params.slug); + const requestedSha = normalizeSkillUploadSha256(params.sha256); + return await store.withCommittedUpload(params.uploadId, async (record, upload) => { + const rejectInvalid = async (error: string): Promise => { + await upload.remove().catch(() => undefined); + return { ok: false, error, errorCode: ErrorCodes.INVALID_REQUEST }; + }; + if (record.kind !== "skill-archive") { + return await rejectInvalid("unsupported upload kind"); + } + if (record.slug !== requestedSlug) { + return await rejectInvalid("install slug does not match upload slug"); + } + if (record.force !== params.force) { + return await rejectInvalid("install force does not match upload force"); + } + if (requestedSha && requestedSha !== record.actualSha256) { + return await rejectInvalid("install sha256 does not match uploaded archive"); + 
} + if (!record.actualSha256) { + return await rejectInvalid("committed upload is missing sha256"); + } + + const install = await installSkillArchiveFromPath({ + archivePath: record.archivePath, + workspaceDir: params.workspaceDir, + slug: record.slug, + force: record.force, + timeoutMs: params.timeoutMs, + logger: params.context.logGateway, + scan: { + installId: "upload", + origin: "skill-upload", + }, + }); + if (!install.ok) { + const errorCode = uploadInstallFailureErrorCode(install.failureKind); + if (install.failureKind === "invalid-request") { + await upload.remove().catch(() => undefined); + } + return { + ok: false, + error: install.error, + errorCode, + }; + } + await upload.remove().catch(() => undefined); + return { + ok: true, + message: `Installed ${record.slug}`, + stdout: "", + stderr: "", + code: 0, + slug: record.slug, + targetDir: install.targetDir, + sha256: record.actualSha256, + }; + }); + } catch (err) { + if (err instanceof SkillUploadRequestError) { + return { + ok: false, + error: err.message, + errorCode: ErrorCodes.INVALID_REQUEST, + }; + } + const error = formatErrorMessage(err); + if (error.startsWith("Invalid skill slug")) { + return { + ok: false, + error, + errorCode: ErrorCodes.INVALID_REQUEST, + }; + } + return { + ok: false, + error, + errorCode: ErrorCodes.UNAVAILABLE, + }; + } +} diff --git a/src/gateway/server-methods/skills.ts b/src/gateway/server-methods/skills.ts index 78d89acb75f..902f228da89 100644 --- a/src/gateway/server-methods/skills.ts +++ b/src/gateway/server-methods/skills.ts @@ -33,6 +33,7 @@ import { validateSkillsStatusParams, validateSkillsUpdateParams, } from "../protocol/index.js"; +import { installUploadedSkillArchive, skillsUploadHandlers } from "./skills-upload.js"; import type { GatewayRequestHandlers } from "./types.js"; function collectSkillBins(entries: SkillEntry[]): string[] { @@ -67,6 +68,7 @@ function collectSkillBins(entries: SkillEntry[]): string[] { } export const skillsHandlers: 
GatewayRequestHandlers = { + ...skillsUploadHandlers, "skills.status": ({ params, respond, context }) => { if (!validateSkillsStatusParams(params)) { respond( @@ -218,6 +220,31 @@ export const skillsHandlers: GatewayRequestHandlers = { ); return; } + if (params && typeof params === "object" && "source" in params && params.source === "upload") { + const p = params as { + source: "upload"; + uploadId: string; + slug: string; + force?: boolean; + sha256?: string; + timeoutMs?: number; + }; + const result = await installUploadedSkillArchive({ + uploadId: p.uploadId, + slug: p.slug, + force: Boolean(p.force), + sha256: p.sha256, + timeoutMs: p.timeoutMs, + workspaceDir: workspaceDirRaw, + context, + }); + respond( + result.ok, + result, + result.ok ? undefined : errorShape(result.errorCode, result.error), + ); + return; + } const p = params as { name: string; installId: string; diff --git a/src/infra/install-flow.ts b/src/infra/install-flow.ts index cda8a5f9a02..dc188e7324d 100644 --- a/src/infra/install-flow.ts +++ b/src/infra/install-flow.ts @@ -33,7 +33,7 @@ export async function withExtractedArchiveRoot( tempDirPrefix: string; timeoutMs: number; logger?: ArchiveLogger; - rootMarkers?: string[]; + rootMarkers?: readonly string[]; onExtracted: (rootDir: string) => Promise; }): Promise { return await withTempDir(params.tempDirPrefix, async (tmpDir) => { @@ -55,7 +55,7 @@ export async function withExtractedArchiveRoot( let rootDir = ""; try { rootDir = await resolvePackedRootDir(extractDir, { - rootMarkers: params.rootMarkers, + rootMarkers: params.rootMarkers ? 
[...params.rootMarkers] : undefined, }); } catch (err) { return { ok: false, error: String(err) }; diff --git a/test/release-check.test.ts b/test/release-check.test.ts index 703ca56b9e0..f19dba262d7 100644 --- a/test/release-check.test.ts +++ b/test/release-check.test.ts @@ -492,6 +492,7 @@ describe("collectMissingPackPaths", () => { "scripts/lib/official-external-channel-catalog.json", "scripts/lib/official-external-plugin-catalog.json", "scripts/lib/official-external-provider-catalog.json", + "scripts/lib/bundled-runtime-deps-install.mjs", "scripts/lib/package-dist-imports.mjs", "scripts/postinstall-bundled-plugins.mjs", "dist/task-registry-control.runtime.js", @@ -523,6 +524,7 @@ describe("collectMissingPackPaths", () => { "scripts/lib/official-external-channel-catalog.json", "scripts/lib/official-external-plugin-catalog.json", "scripts/lib/official-external-provider-catalog.json", + "scripts/lib/bundled-runtime-deps-install.mjs", "scripts/lib/package-dist-imports.mjs", "scripts/postinstall-bundled-plugins.mjs", "dist/plugin-sdk/root-alias.cjs", diff --git a/test/scripts/docker-e2e-plan.test.ts b/test/scripts/docker-e2e-plan.test.ts index b46b62d15f9..9e433d76173 100644 --- a/test/scripts/docker-e2e-plan.test.ts +++ b/test/scripts/docker-e2e-plan.test.ts @@ -267,6 +267,16 @@ describe("scripts/lib/docker-e2e-plan", () => { timeoutMs: 1_800_000, weight: 3, }, + { + command: "OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:skill-install", + imageKind: "bare", + live: false, + name: "skill-install", + resources: ["docker", "npm"], + stateScenario: "empty", + timeoutMs: 600_000, + weight: 2, + }, { command: "OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:upgrade-survivor", imageKind: "bare", @@ -435,6 +445,7 @@ describe("scripts/lib/docker-e2e-plan", () => { "npm-onboard-slack-channel-agent", "doctor-switch", "update-channel-switch", + "skill-install", "upgrade-survivor", "published-upgrade-survivor", "update-restart-auth", @@ -701,6 +712,7 @@ 
describe("scripts/lib/docker-e2e-plan", () => { "bundled-plugin-install-uninstall-0", "commitments-safety", "update-channel-switch", + "skill-install", "upgrade-survivor", ], }); @@ -726,6 +738,7 @@ describe("scripts/lib/docker-e2e-plan", () => { { name: "bundled-plugin-install-uninstall-0", stateScenario: "empty" }, { name: "commitments-safety", stateScenario: "empty" }, { name: "update-channel-switch", stateScenario: "update-stable" }, + { name: "skill-install", stateScenario: "empty" }, { name: "upgrade-survivor", stateScenario: "upgrade-survivor" }, ]); }); diff --git a/test/scripts/lint-suppressions.test.ts b/test/scripts/lint-suppressions.test.ts index 3ea0029d153..a67cf4f2884 100644 --- a/test/scripts/lint-suppressions.test.ts +++ b/test/scripts/lint-suppressions.test.ts @@ -91,7 +91,6 @@ describe("production lint suppressions", () => { "scripts/lib/plugin-npm-release.ts|typescript/no-unnecessary-type-parameters|1", "src/agents/agent-scope.ts|no-control-regex|1", "src/agents/pi-embedded-runner/run/images.ts|no-control-regex|1", - "src/agents/skills-clawhub.ts|no-control-regex|1", "src/agents/subagent-attachments.ts|no-control-regex|1", "src/agents/subagent-spawn.ts|no-control-regex|1", "src/channels/plugins/channel-runtime-surface.types.ts|typescript/no-unnecessary-type-parameters|1", diff --git a/test/scripts/package-acceptance-workflow.test.ts b/test/scripts/package-acceptance-workflow.test.ts index 05e9d10940c..26971fcacec 100644 --- a/test/scripts/package-acceptance-workflow.test.ts +++ b/test/scripts/package-acceptance-workflow.test.ts @@ -109,7 +109,8 @@ describe("package acceptance workflow", () => { expect(workflow).toContain('"all-since-"'); expect(workflow).toContain("npm-onboard-channel-agent gateway-network config-reload"); expect(workflow).toContain("npm-onboard-channel-agent doctor-switch"); - expect(workflow).toContain("update-channel-switch update-corrupt-plugin upgrade-survivor"); + expect(workflow).toContain("update-channel-switch 
skill-install update-corrupt-plugin"); + expect(workflow).toContain("update-corrupt-plugin upgrade-survivor"); expect(workflow).toContain("published-upgrade-survivor"); expect(workflow).toContain("published-upgrade-survivor update-restart-auth"); expect(workflow).toContain("plugins-offline plugin-update"); @@ -558,7 +559,7 @@ describe("package artifact reuse", () => { ); expect(workflow).toContain("suite_profile: custom"); expect(workflow).toContain( - "docker_lanes: doctor-switch update-channel-switch update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins-offline plugin-update", + "docker_lanes: doctor-switch update-channel-switch skill-install update-corrupt-plugin upgrade-survivor published-upgrade-survivor update-restart-auth plugins-offline plugin-update", ); expect(workflow).toContain( "published_upgrade_survivor_baselines: ${{ needs.resolve_target.outputs.run_release_soak == 'true' && 'last-stable-4 2026.4.23 2026.5.2 2026.4.15' || '' }}",