Skip to content

Commit 172dacd

Browse files
committed
refactor: make defaultTemperature required in getModelParams
Make the defaultTemperature parameter required in getModelParams() instead of defaulting to 0. This prevents providers with their own non-zero default temperature (like DeepSeek's 0.3) from being silently overridden by the implicit 0 default. Every provider now explicitly declares its temperature default, making the temperature resolution chain clear: user setting → model default → provider default.
1 parent 8048d5e commit 172dacd

File tree

13 files changed

+64
-23
lines changed

13 files changed

+64
-23
lines changed

src/api/providers/__tests__/deepseek.spec.ts

Lines changed: 7 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -156,25 +156,18 @@ describe("DeepSeekHandler", () => {
156156
expect(model).toHaveProperty("maxTokens")
157157
})
158158

159-
it("should use DEEP_SEEK_DEFAULT_TEMPERATURE (0.3) when no custom temperature is set", () => {
160-
// Ensures the fix for defaulting to 0 instead of 0.3 is in place
161-
// See: https://github.com/RooCodeInc/Roo-Code/issues/11194
162-
const handlerWithoutTemperature = new DeepSeekHandler({
163-
...mockOptions,
164-
modelTemperature: undefined,
165-
})
166-
const model = handlerWithoutTemperature.getModel()
159+
it("should use DEEP_SEEK_DEFAULT_TEMPERATURE as the default temperature", () => {
160+
const model = handler.getModel()
167161
expect(model.temperature).toBe(DEEP_SEEK_DEFAULT_TEMPERATURE)
168162
})
169163

170-
it("should use user-provided temperature when explicitly set", () => {
171-
const customTemperature = 0.7
172-
const handlerWithTemperature = new DeepSeekHandler({
164+
it("should respect user-provided temperature over DEEP_SEEK_DEFAULT_TEMPERATURE", () => {
165+
const handlerWithTemp = new DeepSeekHandler({
173166
...mockOptions,
174-
modelTemperature: customTemperature,
167+
modelTemperature: 0.9,
175168
})
176-
const model = handlerWithTemperature.getModel()
177-
expect(model.temperature).toBe(customTemperature)
169+
const model = handlerWithTemp.getModel()
170+
expect(model.temperature).toBe(0.9)
178171
})
179172
})
180173

src/api/providers/anthropic-vertex.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -231,7 +231,13 @@ export class AnthropicVertexHandler extends BaseProvider implements SingleComple
231231
}
232232
}
233233

234-
const params = getModelParams({ format: "anthropic", modelId: id, model: info, settings: this.options })
234+
const params = getModelParams({
235+
format: "anthropic",
236+
modelId: id,
237+
model: info,
238+
settings: this.options,
239+
defaultTemperature: 0,
240+
})
235241

236242
// Build betas array for request headers
237243
const betas: string[] = []

src/api/providers/anthropic.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -351,6 +351,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
351351
modelId: id,
352352
model: info,
353353
settings: this.options,
354+
defaultTemperature: 0,
354355
})
355356

356357
// The `:thinking` suffix indicates that the model is a "Hybrid"

src/api/providers/cerebras.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,13 @@ export class CerebrasHandler extends BaseProvider implements SingleCompletionHan
4949
override getModel(): { id: string; info: ModelInfo; maxTokens?: number; temperature?: number } {
5050
const id = (this.options.apiModelId ?? cerebrasDefaultModelId) as CerebrasModelId
5151
const info = cerebrasModels[id as keyof typeof cerebrasModels] || cerebrasModels[cerebrasDefaultModelId]
52-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
52+
const params = getModelParams({
53+
format: "openai",
54+
modelId: id,
55+
model: info,
56+
settings: this.options,
57+
defaultTemperature: CEREBRAS_DEFAULT_TEMPERATURE,
58+
})
5359
return { id, info, ...params }
5460
}
5561

src/api/providers/deepinfra.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,7 @@ export class DeepInfraHandler extends RouterProvider implements SingleCompletion
4747
modelId: id,
4848
model: info,
4949
settings: this.options,
50+
defaultTemperature: 0,
5051
})
5152

5253
return { id, info, ...params }

src/api/providers/doubao.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,13 @@ export class DoubaoHandler extends OpenAiHandler {
6464
override getModel() {
6565
const id = this.options.apiModelId ?? doubaoDefaultModelId
6666
const info = doubaoModels[id as keyof typeof doubaoModels] || doubaoModels[doubaoDefaultModelId]
67-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
67+
const params = getModelParams({
68+
format: "openai",
69+
modelId: id,
70+
model: info,
71+
settings: this.options,
72+
defaultTemperature: 0,
73+
})
6874
return { id, info, ...params }
6975
}
7076

src/api/providers/mistral.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,13 @@ export class MistralHandler extends BaseProvider implements SingleCompletionHand
5555
override getModel(): { id: string; info: ModelInfo; maxTokens?: number; temperature?: number } {
5656
const id = (this.options.apiModelId ?? mistralDefaultModelId) as MistralModelId
5757
const info = mistralModels[id as keyof typeof mistralModels] || mistralModels[mistralDefaultModelId]
58-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
58+
const params = getModelParams({
59+
format: "openai",
60+
modelId: id,
61+
model: info,
62+
settings: this.options,
63+
defaultTemperature: 0,
64+
})
5965
return { id, info, ...params }
6066
}
6167

src/api/providers/moonshot.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,13 @@ export class MoonshotHandler extends OpenAICompatibleHandler {
2929
override getModel() {
3030
const id = this.options.apiModelId ?? moonshotDefaultModelId
3131
const info = moonshotModels[id as keyof typeof moonshotModels] || moonshotModels[moonshotDefaultModelId]
32-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
32+
const params = getModelParams({
33+
format: "openai",
34+
modelId: id,
35+
model: info,
36+
settings: this.options,
37+
defaultTemperature: 0,
38+
})
3339
return { id, info, ...params }
3440
}
3541

src/api/providers/openai.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -282,7 +282,13 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
282282
override getModel() {
283283
const id = this.options.openAiModelId ?? ""
284284
const info: ModelInfo = this.options.openAiCustomModelInfo ?? openAiModelInfoSaneDefaults
285-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
285+
const params = getModelParams({
286+
format: "openai",
287+
modelId: id,
288+
model: info,
289+
settings: this.options,
290+
defaultTemperature: 0,
291+
})
286292
return { id, info, ...params }
287293
}
288294

src/api/providers/requesty.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -89,6 +89,7 @@ export class RequestyHandler extends BaseProvider implements SingleCompletionHan
8989
modelId: id,
9090
model: info,
9191
settings: this.options,
92+
defaultTemperature: 0,
9293
})
9394

9495
return { id, info, ...params }

0 commit comments

Comments (0)