Commit b697b89
fix(openai-transport): handle Mistral reasoning_content as array/object in completions stream (#67192)
Mistral's OpenAI-compatible streaming endpoint returns reasoning_content as an array of block objects rather than a flat string:

    {delta: {reasoning_content: [{type: "thinking", content: "..."}]}}

The existing getCompletionsReasoningDelta() only checked typeof value === "string", so the array/object fell through unhandled. When the value was later used in a template literal or concatenation, JavaScript coerced it to "[object Object]", corrupting the stream and crashing the channel with repeated [object Object] output.

Fix: extend getCompletionsReasoningDelta() to handle three formats for the reasoning_content / reasoning / reasoning_text fields:

1. string (existing, unchanged)
2. array of block objects: [{type:"thinking",content:"..."} | {type:"text",text:"..."} | string]
3. single plain object: {type:"thinking",content:"..."} or {type:"text",text:"..."}

Adds 4 tests in openai-transport-stream.test.ts covering array blocks, plain object, string backward compat, and empty array (no crash).
1 parent 610dca9 commit b697b89
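
For context, the corruption described in the message is ordinary JavaScript string coercion: interpolating an array of objects goes through Array.prototype.toString, which renders each element as "[object Object]". A minimal illustrative repro in TypeScript (not code from this commit, just the coercion the fix guards against):

    // Shape Mistral streams in the delta, per the commit message:
    const reasoningContent: unknown = [{ type: "thinking", content: "Let me think..." }];

    // Code that assumes a string and interpolates the value emits:
    console.log(`${reasoningContent}`); // -> "[object Object]"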

2 files changed: 258 additions & 0 deletions

src/agents/openai-transport-stream.test.ts

Lines changed: 221 additions & 0 deletions
@@ -2018,3 +2018,224 @@ describe("openai transport stream", () => {
     ]);
   });
 });
+
+describe("Mistral reasoning_content object/array handling (#67192)", () => {
+  // Shared model fixture for Mistral completions
+  const mistralModel = {
+    id: "mistral-small-latest",
+    provider: "mistral",
+    api: "openai-completions" as const,
+    baseUrl: "https://api.mistral.ai/v1",
+    reasoning: true,
+    input: ["text" as const],
+    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
+    contextWindow: 128000,
+    maxTokens: 16384,
+  };
+
+  function makeOutput() {
+    return {
+      role: "assistant" as const,
+      content: [],
+      api: mistralModel.api,
+      provider: mistralModel.provider,
+      model: mistralModel.id,
+      usage: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+        totalTokens: 0,
+        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
+      },
+      stopReason: "stop",
+      timestamp: Date.now(),
+    };
+  }
+
+  it("extracts reasoning from reasoning_content array of block objects", async () => {
+    const output = makeOutput();
+    const stream: { push(event: unknown): void } = { push() {} };
+
+    const mockChunks = [
+      {
+        id: "cmpl-mistral-array",
+        object: "chat.completion.chunk" as const,
+        choices: [
+          {
+            index: 0,
+            delta: {
+              reasoning_content: [
+                { type: "thinking", content: "Let me think about this." },
+                { type: "thinking", content: " More reasoning." },
+              ],
+            } as Record<string, unknown>,
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: "cmpl-mistral-text",
+        object: "chat.completion.chunk" as const,
+        choices: [
+          {
+            index: 0,
+            delta: { content: "Here is the answer." },
+            logprobs: null,
+            finish_reason: "stop",
+          },
+        ],
+      },
+    ];
+
+    async function* mockStream() {
+      for (const chunk of mockChunks) {
+        yield chunk as never;
+      }
+    }
+
+    await __testing.processOpenAICompletionsStream(mockStream(), output, mistralModel, stream);
+
+    expect(output.content).toMatchObject([
+      { type: "thinking", thinking: "Let me think about this. More reasoning." },
+      { type: "text", text: "Here is the answer." },
+    ]);
+  });
+
+  it("extracts reasoning from reasoning_content plain object (single block)", async () => {
+    const output = makeOutput();
+    const stream: { push(event: unknown): void } = { push() {} };
+
+    const mockChunks = [
+      {
+        id: "cmpl-mistral-obj",
+        object: "chat.completion.chunk" as const,
+        choices: [
+          {
+            index: 0,
+            delta: {
+              reasoning_content: { type: "thinking", content: "Single block reasoning." },
+            } as Record<string, unknown>,
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: "cmpl-mistral-text",
+        object: "chat.completion.chunk" as const,
+        choices: [
+          {
+            index: 0,
+            delta: { content: "Answer." },
+            logprobs: null,
+            finish_reason: "stop",
+          },
+        ],
+      },
+    ];
+
+    async function* mockStream() {
+      for (const chunk of mockChunks) {
+        yield chunk as never;
+      }
+    }
+
+    await __testing.processOpenAICompletionsStream(mockStream(), output, mistralModel, stream);
+
+    expect(output.content).toMatchObject([
+      { type: "thinking", thinking: "Single block reasoning." },
+      { type: "text", text: "Answer." },
+    ]);
+  });
+
+  it("still handles reasoning_content as a plain string (backward compat)", async () => {
+    const output = makeOutput();
+    const stream: { push(event: unknown): void } = { push() {} };
+
+    const mockChunks = [
+      {
+        id: "cmpl-mistral-str",
+        object: "chat.completion.chunk" as const,
+        choices: [
+          {
+            index: 0,
+            delta: { reasoning_content: "String reasoning." } as Record<string, unknown>,
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: "cmpl-mistral-end",
+        object: "chat.completion.chunk" as const,
+        choices: [
+          {
+            index: 0,
+            delta: { content: "Done." },
+            logprobs: null,
+            finish_reason: "stop",
+          },
+        ],
+      },
+    ];
+
+    async function* mockStream() {
+      for (const chunk of mockChunks) {
+        yield chunk as never;
+      }
+    }
+
+    await __testing.processOpenAICompletionsStream(mockStream(), output, mistralModel, stream);
+
+    expect(output.content).toMatchObject([
+      { type: "thinking", thinking: "String reasoning." },
+      { type: "text", text: "Done." },
+    ]);
+  });
+
+  it("ignores empty reasoning_content arrays without crashing", async () => {
+    const output = makeOutput();
+    const stream: { push(event: unknown): void } = { push() {} };
+
+    const mockChunks = [
+      {
+        id: "cmpl-mistral-empty",
+        object: "chat.completion.chunk" as const,
+        choices: [
+          {
+            index: 0,
+            delta: { reasoning_content: [] } as Record<string, unknown>,
+            logprobs: null,
+            finish_reason: null,
+          },
+        ],
+      },
+      {
+        id: "cmpl-mistral-end",
+        object: "chat.completion.chunk" as const,
+        choices: [
+          {
+            index: 0,
+            delta: { content: "Just text, no reasoning." },
+            logprobs: null,
+            finish_reason: "stop",
+          },
+        ],
+      },
+    ];
+
+    async function* mockStream() {
+      for (const chunk of mockChunks) {
+        yield chunk as never;
+      }
+    }
+
+    await __testing.processOpenAICompletionsStream(mockStream(), output, mistralModel, stream);
+
+    // Should have only text, no thinking block
+    expect(output.content).toMatchObject([{ type: "text", text: "Just text, no reasoning." }]);
+    expect(output.content.some((b: { type: string }) => b.type === "thinking")).toBe(false);
+  });
+});

src/agents/openai-transport-stream.ts

Lines changed: 37 additions & 0 deletions
@@ -1218,6 +1218,43 @@ function getCompletionsReasoningDelta(delta: Record<string, unknown>): {
     if (typeof value === "string" && value.length > 0) {
       return { signature: field, text: value };
     }
+    // Mistral returns reasoning_content as an array of block objects:
+    // [{type: "thinking", content: "..."}, ...]
+    // or occasionally as a plain object {type: "thinking", content: "..."}
+    // rather than a flat string. Without this branch the value coerces to
+    // "[object Object]" when used in a template literal, corrupting the stream
+    // and crashing the channel. (#67192)
+    if (Array.isArray(value)) {
+      let text = "";
+      for (const item of value) {
+        if (item && typeof item === "object") {
+          const block = item as Record<string, unknown>;
+          // Mistral block shapes: {type:"thinking",content:"..."} or {type:"text",text:"..."}
+          if (typeof block.content === "string" && block.content.length > 0) {
+            text += block.content;
+          } else if (typeof block.text === "string" && block.text.length > 0) {
+            text += block.text;
+          }
+        } else if (typeof item === "string" && item.length > 0) {
+          text += item;
+        }
+      }
+      if (text.length > 0) {
+        return { signature: field, text };
+      }
+    } else if (value !== null && value !== undefined && typeof value === "object") {
+      // Plain object block: {type: "thinking", content: "..."}
+      const block = value as Record<string, unknown>;
+      const text =
+        typeof block.content === "string"
+          ? block.content
+          : typeof block.text === "string"
+            ? block.text
+            : undefined;
+      if (text && text.length > 0) {
+        return { signature: field, text };
+      }
+    }
   }
   return null;
 }
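
As a quick sketch of the extended contract (illustrative only; getCompletionsReasoningDelta is module-internal, so these are the input shapes and expected extractions rather than calls against a public API):

    // Each delta below should now yield the reasoning text noted on the right,
    // assuming the surrounding loop probes the reasoning_content / reasoning /
    // reasoning_text fields as in the existing code:
    const deltas: Record<string, unknown>[] = [
      { reasoning_content: "plain string" },                            // -> "plain string"
      { reasoning_content: [{ type: "thinking", content: "a" }, "b"] }, // -> "ab"
      { reasoning_content: { type: "text", text: "c" } },               // -> "c"
    ];
    // An empty array accumulates no text, so no reasoning delta is emitted,
    // which is what the new empty-array test asserts.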
