diff --git a/extensions/azure-speech/azure-speech.live.test.ts b/extensions/azure-speech/azure-speech.live.test.ts index 034882e96e9..16a71e18216 100644 --- a/extensions/azure-speech/azure-speech.live.test.ts +++ b/extensions/azure-speech/azure-speech.live.test.ts @@ -36,9 +36,7 @@ describeLive("azure speech plugin live", () => { }); expect(voices?.length).toBeGreaterThan(100); - expect(voices).toEqual( - expect.arrayContaining([expect.objectContaining({ id: "en-US-JennyNeural" })]), - ); + expect(voices?.some((voice) => voice.id === "en-US-JennyNeural")).toBe(true); }, 120_000); it("synthesizes MP3, native Ogg/Opus voice notes, and telephony audio", async () => { diff --git a/extensions/deepseek/deepseek.live.test.ts b/extensions/deepseek/deepseek.live.test.ts index c25167a2154..d5c90a0a7d6 100644 --- a/extensions/deepseek/deepseek.live.test.ts +++ b/extensions/deepseek/deepseek.live.test.ts @@ -161,20 +161,17 @@ describeLive("deepseek plugin live", () => { const messages = capturedPayload?.messages; expect(Array.isArray(messages)).toBe(true); - expect((messages as Array<Record<string, unknown>>)[1]).toMatchObject({ - role: "assistant", - reasoning_content: "", - tool_calls: [ - { - id: toolCallId, - type: "function", - function: { - name: "noop", - arguments: "{}", - }, - }, - ], - }); + const assistantMessage = (messages as Array<Record<string, unknown>>)[1]; + expect(assistantMessage?.role).toBe("assistant"); + expect(assistantMessage?.reasoning_content).toBe(""); + const toolCalls = assistantMessage?.tool_calls; + expect(Array.isArray(toolCalls)).toBe(true); + const toolCall = (toolCalls as Array<Record<string, unknown>>)[0]; + expect(toolCall?.id).toBe(toolCallId); + expect(toolCall?.type).toBe("function"); + const toolFunction = toolCall?.function as Record<string, unknown> | undefined; + expect(toolFunction?.name).toBe("noop"); + expect(toolFunction?.arguments).toBe("{}"); expect(extractNonEmptyAssistantText(result.content).length).toBeGreaterThan(0); }, 60_000); @@ -226,11 +223,10 @@ describeLive("deepseek plugin live", () => { const messages
= capturedPayload?.messages; expect(Array.isArray(messages)).toBe(true); - expect((messages as Array<Record<string, unknown>>)[1]).toMatchObject({ - role: "assistant", - content: "Hello.", - reasoning_content: "", - }); + const assistantMessage = (messages as Array<Record<string, unknown>>)[1]; + expect(assistantMessage?.role).toBe("assistant"); + expect(assistantMessage?.content).toBe("Hello."); + expect(assistantMessage?.reasoning_content).toBe(""); expect(extractNonEmptyAssistantText(result.content).length).toBeGreaterThan(0); }, 60_000); }); diff --git a/extensions/inworld/inworld.live.test.ts b/extensions/inworld/inworld.live.test.ts index d6cb0954a1f..279216ea0c6 100644 --- a/extensions/inworld/inworld.live.test.ts +++ b/extensions/inworld/inworld.live.test.ts @@ -27,7 +27,7 @@ describeLive("inworld plugin live", () => { }); expect(voices?.length).toBeGreaterThan(0); - expect(voices).toEqual(expect.arrayContaining([expect.objectContaining({ id: "Sarah" })])); + expect(voices?.some((voice) => voice.id === "Sarah")).toBe(true); }, 120_000); it("synthesizes MP3, native voice-note Ogg/Opus, and telephony PCM", async () => { diff --git a/extensions/ollama/ollama.live.test.ts b/extensions/ollama/ollama.live.test.ts index 3a769c95d40..ea69ea80343 100644 --- a/extensions/ollama/ollama.live.test.ts +++ b/extensions/ollama/ollama.live.test.ts @@ -142,12 +142,10 @@ describe.skipIf(!LIVE)("ollama live", () => { model?: string; outputs?: Array<{ text?: string }>; }; - expect(payload).toMatchObject({ - ok: true, - transport: "local", - provider: "ollama", - model: CHAT_MODEL, - }); + expect(payload.ok).toBe(true); + expect(payload.transport).toBe("local"); + expect(payload.provider).toBe("ollama"); + expect(payload.model).toBe(CHAT_MODEL); expect(payload.outputs?.[0]?.text?.trim().length ??
0).toBeGreaterThan(0); }); }, 120_000); diff --git a/extensions/openai/openai-provider.live.test.ts b/extensions/openai/openai-provider.live.test.ts index ddb9f3a996b..f91cbb67062 100644 --- a/extensions/openai/openai-provider.live.test.ts +++ b/extensions/openai/openai-provider.live.test.ts @@ -169,13 +169,11 @@ describeLive("buildOpenAIProvider live", () => { model: resolved, }); - expect(normalized).toMatchObject({ - provider: "openai", - id: liveCase.modelId, - api: "openai-responses", - baseUrl: "https://api.openai.com/v1", - reasoning: liveCase.reasoning, - }); + expect(normalized?.provider).toBe("openai"); + expect(normalized?.id).toBe(liveCase.modelId); + expect(normalized?.api).toBe("openai-responses"); + expect(normalized?.baseUrl).toBe("https://api.openai.com/v1"); + expect(normalized?.reasoning).toEqual(liveCase.reasoning); const client = new OpenAI({ apiKey: OPENAI_API_KEY, diff --git a/extensions/openai/openai-tts.live.test.ts b/extensions/openai/openai-tts.live.test.ts index baa6f2b43fa..1870b604f51 100644 --- a/extensions/openai/openai-tts.live.test.ts +++ b/extensions/openai/openai-tts.live.test.ts @@ -11,7 +11,7 @@ describeLive("openai tts live", () => { const speechProvider = buildOpenAISpeechProvider(); const voices = await speechProvider.listVoices?.({}); - expect(voices).toEqual(expect.arrayContaining([expect.objectContaining({ id: "alloy" })])); + expect(voices?.some((voice) => voice.id === "alloy")).toBe(true); const providerConfig = { apiKey: OPENAI_API_KEY, diff --git a/extensions/openai/openai.live.test.ts b/extensions/openai/openai.live.test.ts index b1fc16d87ea..f3c837a2825 100644 --- a/extensions/openai/openai.live.test.ts +++ b/extensions/openai/openai.live.test.ts @@ -234,12 +234,10 @@ describeLive("openai plugin live", () => { model: resolved, }); - expect(normalized).toMatchObject({ - provider: "openai", - id: LIVE_MODEL_ID, - api: "openai-responses", - baseUrl: "https://api.openai.com/v1", - }); + 
expect(normalized?.provider).toBe("openai"); + expect(normalized?.id).toBe(LIVE_MODEL_ID); + expect(normalized?.api).toBe("openai-responses"); + expect(normalized?.baseUrl).toBe("https://api.openai.com/v1"); const client = new OpenAI({ apiKey: OPENAI_API_KEY, @@ -265,7 +263,7 @@ describeLive("openai plugin live", () => { if (!voices) { throw new Error("openai speech provider did not return voices"); } - expect(voices).toEqual(expect.arrayContaining([expect.objectContaining({ id: "alloy" })])); + expect(voices.some((voice) => voice.id === "alloy")).toBe(true); const cfg = createLiveConfig(); const ttsConfig = createLiveTtsConfig(); diff --git a/extensions/openrouter/openrouter.live.test.ts b/extensions/openrouter/openrouter.live.test.ts index 24a6b77fff2..56a5f2c7f84 100644 --- a/extensions/openrouter/openrouter.live.test.ts +++ b/extensions/openrouter/openrouter.live.test.ts @@ -63,12 +63,10 @@ describeLive("openrouter plugin live", () => { throw new Error(`openrouter provider did not resolve ${LIVE_MODEL_ID}`); } - expect(resolved).toMatchObject({ - provider: "openrouter", - id: LIVE_MODEL_ID, - api: "openai-completions", - baseUrl: "https://openrouter.ai/api/v1", - }); + expect(resolved.provider).toBe("openrouter"); + expect(resolved.id).toBe(LIVE_MODEL_ID); + expect(resolved.api).toBe("openai-completions"); + expect(resolved.baseUrl).toBe("https://openrouter.ai/api/v1"); const client = new OpenAI({ apiKey: OPENROUTER_API_KEY, diff --git a/extensions/vydra/vydra.live.test.ts b/extensions/vydra/vydra.live.test.ts index 95c2182a945..944049335fd 100644 --- a/extensions/vydra/vydra.live.test.ts +++ b/extensions/vydra/vydra.live.test.ts @@ -60,9 +60,7 @@ describe.skipIf(!LIVE || !VYDRA_API_KEY)("vydra live", () => { const { speechProviders } = await registerVydraPlugin(); const provider = requireRegisteredProvider(speechProviders, "vydra"); const voices = await provider.listVoices?.({}); - expect(voices).toEqual( - expect.arrayContaining([expect.objectContaining({ id: 
"21m00Tcm4TlvDq8ikWAM" })]), - ); + expect(voices?.some((voice) => voice.id === "21m00Tcm4TlvDq8ikWAM")).toBe(true); const result = await provider.synthesize({ text: "OpenClaw integration test OK.",