Files
moltbot/extensions/github-copilot/model-metadata.ts
Peter Steinberger 0b8ee4616d fix(github-copilot): support Gemini image understanding
Fixes Copilot image understanding by exchanging OAuth tokens for Copilot API tokens, routing Copilot Gemini image requests through Chat Completions, and sending the prompt in user content with Copilot vision headers.

Real behavior proof:
- Old Responses route with real Copilot key reproduced `400 model gemini-3.1-pro-preview does not support Responses API`.
- Fixed route with the same real Copilot key returned `Cat`.
- Final CLI live smoke returned `ok: true` and `text: Cat` for `github-copilot/gemini-3.1-pro-preview`.

Verification:
- pnpm test src/media-understanding/image.test.ts extensions/github-copilot/models.test.ts extensions/github-copilot/stream.test.ts src/agents/pi-hooks/compaction-safeguard.test.ts -- --reporter=verbose
- pnpm check:changed via Blacksmith Testbox tbx_01krgt56pqmft8txekt017wke6, Actions run https://github.com/openclaw/openclaw/actions/runs/25803926150, exit 0.

Refs #80393, #80442.

Co-authored-by: Yang Haoyu <150496764+afunnyhy@users.noreply.github.com>
2026-05-13 15:20:27 +01:00

52 lines
1.6 KiB
TypeScript

import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { normalizeOptionalLowercaseString } from "openclaw/plugin-sdk/string-coerce-runtime";
// Wire protocols a Copilot model request can be routed through.
type CopilotRuntimeApi = "anthropic-messages" | "openai-completions" | "openai-responses";
// Compat flags applied to models served via the Chat Completions route:
// disable `store`, the `developer` role, and streaming usage reporting,
// and send the token limit under the legacy `max_tokens` field name.
// NOTE(review): field semantics inferred from names — confirm against
// ModelDefinitionConfig["compat"] in the plugin SDK.
const COPILOT_CHAT_COMPLETIONS_COMPAT: ModelDefinitionConfig["compat"] = {
  supportsStore: false,
  supportsDeveloperRole: false,
  supportsUsageInStreaming: false,
  maxTokensField: "max_tokens",
};
// Hand-maintained metadata patches, keyed by normalized (lowercased) model
// id. Looked up by resolveStaticCopilotModelOverride; entries are partial,
// so only the listed fields are overridden.
const STATIC_MODEL_OVERRIDES = new Map<string, Partial<ModelDefinitionConfig>>([
  [
    "gpt-5.5",
    {
      name: "GPT-5.5",
      reasoning: true,
      contextWindow: 400_000,
      maxTokens: 128_000,
    },
  ],
]);
/**
 * Returns true when the model id contains a standalone "gemini" token,
 * i.e. "gemini" delimited by string start/end or one of `-`, `_`, `.`
 * (e.g. "gemini-3.1-pro-preview", "x.gemini_pro"), but not when it is
 * embedded inside a longer word (e.g. "ungemini").
 *
 * The `i` flag makes the check case-insensitive so the helper is safe on
 * un-normalized ids; callers in this file lowercase ids first anyway, so
 * this only widens (never narrows) the accepted set.
 */
function isCopilotGeminiModelId(modelId: string): boolean {
  return /(?:^|[-_.])gemini(?:$|[-_.])/i.test(modelId);
}
/**
 * Picks the wire protocol for a Copilot model id.
 *
 * Claude models go over Anthropic Messages; Gemini models go over Chat
 * Completions (they do not support the Responses API — see the commit
 * message above); everything else defaults to Responses.
 */
export function resolveCopilotTransportApi(modelId: string): CopilotRuntimeApi {
  const id = normalizeOptionalLowercaseString(modelId) ?? "";
  if (id.includes("claude")) {
    return "anthropic-messages";
  }
  return isCopilotGeminiModelId(id) ? "openai-completions" : "openai-responses";
}
/**
 * Returns the Chat Completions compat flags for Gemini models, or
 * `undefined` for every other model id. A shallow copy is returned so
 * callers cannot mutate the shared constant.
 */
export function resolveCopilotModelCompat(
  modelId: string,
): ModelDefinitionConfig["compat"] | undefined {
  const id = normalizeOptionalLowercaseString(modelId) ?? "";
  if (!isCopilotGeminiModelId(id)) {
    return undefined;
  }
  return { ...COPILOT_CHAT_COMPLETIONS_COMPAT };
}
/**
 * Looks up a hand-maintained metadata override for the given model id
 * (matched case-insensitively via normalization). Returns `undefined`
 * when no override is registered.
 */
export function resolveStaticCopilotModelOverride(
  modelId: string,
): Partial<ModelDefinitionConfig> | undefined {
  const key = normalizeOptionalLowercaseString(modelId) ?? "";
  return STATIC_MODEL_OVERRIDES.get(key);
}