fix(agents): preserve OpenAI event streams

Peter Steinberger
2026-05-10 11:07:40 +01:00
parent 5157ee268f
commit 5d8b500929
3 changed files with 94 additions and 27 deletions

View File

@@ -12,21 +12,31 @@ export async function prepareMinGitZip(tgzDir: string): Promise<string> {
     String.raw`import json
 import urllib.request
-req = urllib.request.Request(
-    "https://api.github.com/repos/git-for-windows/git/releases/latest",
-    headers={
-        "User-Agent": "openclaw-parallels-smoke",
-        "Accept": "application/vnd.github+json",
-    },
-)
-with urllib.request.urlopen(req, timeout=30) as response:
-    data = json.load(response)
-assets = data.get("assets", [])
 preferred_names = [
     "MinGit-2.53.0.2-arm64.zip",
     "MinGit-2.53.0.2-64-bit.zip",
 ]
+fallback_urls = {
+    "MinGit-2.53.0.2-arm64.zip": "https://github.com/git-for-windows/git/releases/download/v2.53.0.windows.2/MinGit-2.53.0.2-arm64.zip",
+    "MinGit-2.53.0.2-64-bit.zip": "https://github.com/git-for-windows/git/releases/download/v2.53.0.windows.2/MinGit-2.53.0.2-64-bit.zip",
+}
+try:
+    req = urllib.request.Request(
+        "https://api.github.com/repos/git-for-windows/git/releases/latest",
+        headers={
+            "User-Agent": "openclaw-parallels-smoke",
+            "Accept": "application/vnd.github+json",
+        },
+    )
+    with urllib.request.urlopen(req, timeout=30) as response:
+        data = json.load(response)
+except Exception:
+    print(preferred_names[0])
+    print(fallback_urls[preferred_names[0]])
+    raise SystemExit(0)
+assets = data.get("assets", [])
 best = None
 for wanted in preferred_names:
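
Note: the fallback path above prints two lines, the pinned MinGit asset name and its download URL, then exits 0, presumably mirroring what the success path emits after it picks a matching release asset. A minimal sketch of how a caller could consume that output; the parseMinGitSelection helper and its shape are illustrative assumptions, not code from this commit:

// Hypothetical consumer of the helper's stdout (not part of this commit):
// line 1 is the chosen MinGit asset name, line 2 is its download URL.
interface MinGitSelection {
  name: string;
  url: string;
}

function parseMinGitSelection(stdout: string): MinGitSelection {
  const lines = stdout
    .split(/\r?\n/)
    .map((line) => line.trim())
    .filter((line) => line.length > 0);
  if (lines.length < 2) {
    throw new Error("expected an asset name and a download URL on stdout");
  }
  return { name: lines[0], url: lines[1] };
}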

View File

@@ -373,12 +373,12 @@ describe("buildGuardedModelFetch", () => {
     });
     const model = {
       id: "gpt-5.4",
-      provider: "openai",
+      provider: "openrouter",
       api: "openai-responses",
-      baseUrl: "https://api.openai.com/v1",
+      baseUrl: "https://openrouter.ai/api/v1",
     } as unknown as Model<"openai-responses">;
-    const response = await buildGuardedModelFetch(model)("https://api.openai.com/v1/responses", {
+    const response = await buildGuardedModelFetch(model)("https://openrouter.ai/api/v1/responses", {
       method: "POST",
     });
     const items = [];
@@ -389,6 +389,30 @@ describe("buildGuardedModelFetch", () => {
     expect(items).toEqual([{ ok: true }]);
   });
 
+  it("leaves official OpenAI SSE streams unmodified", async () => {
+    fetchWithSsrFGuardMock.mockResolvedValue({
+      response: new Response('event: response.created\n\ndata: {"ok": true}\n\n', {
+        headers: { "content-type": "text/event-stream" },
+      }),
+      finalUrl: "https://api.openai.com/v1/responses",
+      release: vi.fn(async () => undefined),
+    });
+    const model = {
+      id: "gpt-5.5",
+      provider: "openai",
+      api: "openai-responses",
+      baseUrl: "https://api.openai.com/v1",
+    } as unknown as Model<"openai-responses">;
+    const response = await buildGuardedModelFetch(model)("https://api.openai.com/v1/responses", {
+      method: "POST",
+    });
+    await expect(response.text()).resolves.toBe(
+      'event: response.created\n\ndata: {"ok": true}\n\n',
+    );
+  });
+
   it("drops whitespace-only SSE data frames with CRLF delimiters", async () => {
     fetchWithSsrFGuardMock.mockResolvedValue({
       response: new Response('event: message\r\ndata: \r\n\r\ndata: {"ok": true}\r\n\r\n', {
@@ -399,13 +423,13 @@ describe("buildGuardedModelFetch", () => {
     });
     const model = {
       id: "gpt-5.4",
-      provider: "openai",
+      provider: "openrouter",
       api: "openai-completions",
-      baseUrl: "https://api.openai.com/v1",
+      baseUrl: "https://openrouter.ai/api/v1",
     } as unknown as Model<"openai-completions">;
     const response = await buildGuardedModelFetch(model)(
-      "https://api.openai.com/v1/chat/completions",
+      "https://openrouter.ai/api/v1/chat/completions",
       { method: "POST" },
     );
     const items = [];
@@ -448,6 +472,33 @@ describe("buildGuardedModelFetch", () => {
     expect(items).toEqual([{ ok: true }]);
   });
 
+  it("does not clone Request bodies while checking for streaming JSON fallbacks", async () => {
+    const cloneSpy = vi.spyOn(Request.prototype, "clone");
+    fetchWithSsrFGuardMock.mockResolvedValue({
+      response: new Response('{"ok": true}', {
+        headers: { "content-type": "application/json" },
+      }),
+      finalUrl: "https://api.openai.com/v1/responses",
+      release: vi.fn(async () => undefined),
+    });
+    const model = {
+      id: "gpt-5.5",
+      provider: "openai",
+      api: "openai-responses",
+      baseUrl: "https://api.openai.com/v1",
+    } as unknown as Model<"openai-responses">;
+    const request = new Request("https://api.openai.com/v1/responses", {
+      method: "POST",
+      headers: { "content-type": "application/json" },
+      body: JSON.stringify({ model: "gpt-5.5", stream: true }),
+    });
+    const response = await buildGuardedModelFetch(model)(request);
+    expect(cloneSpy).not.toHaveBeenCalled();
+    expect(response.headers.get("content-type")).toBe("application/json");
+  });
+
   it("preserves JSON bodies when the request is not streaming", async () => {
     fetchWithSsrFGuardMock.mockResolvedValue({
       response: new Response('{"ok": true}', {
@@ -531,13 +582,13 @@ describe("buildGuardedModelFetch", () => {
     });
     const model = {
       id: "gpt-5.4",
-      provider: "openai",
+      provider: "openrouter",
       api: "openai-completions",
-      baseUrl: "https://api.openai.com/v1",
+      baseUrl: "https://openrouter.ai/api/v1",
     } as unknown as Model<"openai-completions">;
     const response = await buildGuardedModelFetch(model)(
-      "https://api.openai.com/v1/chat/completions",
+      "https://openrouter.ai/api/v1/chat/completions",
       { method: "POST" },
     );
     const items = [];
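
The hunks above truncate the assertion loops: each test drains the sanitized stream and collects the parsed data frames into items. A rough, self-contained sketch of that consumption pattern, not the exact helper these tests use:

// Rough sketch (assumption, not this repo's test helper): read an SSE body and
// collect every non-empty `data:` payload as parsed JSON.
async function collectSseData(response: Response): Promise<unknown[]> {
  const items: unknown[] = [];
  const text = await response.text();
  // SSE events are separated by a blank line; each event may carry a data: field.
  for (const event of text.split(/\r?\n\r?\n/)) {
    for (const line of event.split(/\r?\n/)) {
      if (!line.startsWith("data:")) {
        continue;
      }
      const payload = line.slice("data:".length).trim();
      if (payload.length > 0) {
        items.push(JSON.parse(payload));
      }
    }
  }
  return items;
}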

View File

@@ -172,6 +172,17 @@ function sanitizeOpenAISdkSseResponse(
   });
 }
 
+function shouldSanitizeOpenAISdkSseResponse(model: Model<Api>): boolean {
+  if (model.provider !== "openai") {
+    return true;
+  }
+  try {
+    return new URL(model.baseUrl).hostname.toLowerCase() !== "api.openai.com";
+  } catch {
+    return true;
+  }
+}
+
 async function requestBodyHasStreamTrue(
   request: Request | undefined,
   init: RequestInit | undefined,
@@ -187,12 +198,7 @@ async function requestBodyHasStreamTrue(
   }
   let text: string | undefined;
-  if (request) {
-    text = await request
-      .clone()
-      .text()
-      .catch(() => undefined);
-  } else if (typeof init?.body === "string") {
+  if (typeof init?.body === "string") {
     text = init.body;
   }
   if (!text) {
@@ -534,7 +540,7 @@ export function buildGuardedModelFetch(
       result.refreshTimeout,
       localServiceLease,
     );
-    return options?.sanitizeSse === false
+    return options?.sanitizeSse === false || !shouldSanitizeOpenAISdkSseResponse(model)
       ? response
       : sanitizeOpenAISdkSseResponse(response, { synthesizeJsonAsSse });
   };
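
For reference, the effect of the new guard on the combinations exercised by the updated tests; a standalone sketch with plain objects standing in for Model<Api>, and illustrative inputs rather than the repo's fixtures:

// Standalone sketch of the gating rule added above; plain objects stand in
// for Model<Api>, and the example inputs are illustrative.
function shouldSanitize(model: { provider: string; baseUrl: string }): boolean {
  if (model.provider !== "openai") {
    return true;
  }
  try {
    // Anything other than the official api.openai.com host keeps being sanitized.
    return new URL(model.baseUrl).hostname.toLowerCase() !== "api.openai.com";
  } catch {
    return true;
  }
}

console.log(shouldSanitize({ provider: "openai", baseUrl: "https://api.openai.com/v1" })); // false: stream passes through untouched
console.log(shouldSanitize({ provider: "openai", baseUrl: "https://proxy.example/v1" })); // true: non-official host still sanitized
console.log(shouldSanitize({ provider: "openrouter", baseUrl: "https://openrouter.ai/api/v1" })); // true: other providers still sanitized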