Skip to content
Snippets Groups Projects
Unverified Commit 15563a0f authored by Parham Saidi's avatar Parham Saidi Committed by GitHub
Browse files

fix: delete temp completely from params when calling openai o3 (#1629)

parent bd940d1d
No related branches found
No related tags found
No related merge requests found
---
"@llamaindex/openai": patch
---
fix: moved the temperature exclusion to a lower level for the o3-mini OpenAI model
...@@ -364,7 +364,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> { ...@@ -364,7 +364,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
const { messages, stream, tools, additionalChatOptions } = params; const { messages, stream, tools, additionalChatOptions } = params;
const baseRequestParams = <OpenAILLM.Chat.ChatCompletionCreateParams>{ const baseRequestParams = <OpenAILLM.Chat.ChatCompletionCreateParams>{
model: this.model, model: this.model,
temperature: isTemperatureSupported(this.model) ? this.temperature : null, temperature: this.temperature,
reasoning_effort: this.reasoningEffort, reasoning_effort: this.reasoningEffort,
max_tokens: this.maxTokens, max_tokens: this.maxTokens,
tools: tools?.map(OpenAI.toTool), tools: tools?.map(OpenAI.toTool),
...@@ -381,6 +381,9 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> { ...@@ -381,6 +381,9 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
delete baseRequestParams.tools; delete baseRequestParams.tools;
} }
if (!isTemperatureSupported(baseRequestParams.model))
delete baseRequestParams.temperature;
// Streaming // Streaming
if (stream) { if (stream) {
return this.streamChat(baseRequestParams); return this.streamChat(baseRequestParams);
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment