Unverified commit 2019a041, authored by Parham Saidi and committed by GitHub

fix: o3 calls do not support temperature param (#1622)

parent 067a4894
---
"@llamaindex/openai": patch
---

fix: remove temp for o3-mini
@@ -140,6 +140,10 @@ export function isFunctionCallingModel(llm: LLM): llm is OpenAI {
   return isChatModel && !isOld && !isO1;
 }
 
+export function isTemperatureSupported(model: ChatModel | string): boolean {
+  return !model.startsWith("o3");
+}
+
 export type OpenAIAdditionalMetadata = object;
 
 export type OpenAIAdditionalChatOptions = Omit<
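
For orientation, here is a minimal sketch of how the new guard classifies model names. The import path is an assumption for illustration; the commit only shows the helper being exported from the provider's LLM module:

// Illustrative usage of the new guard; the import path is assumed, not confirmed.
import { isTemperatureSupported } from "@llamaindex/openai";

isTemperatureSupported("o3-mini"); // false: o3-family models reject the temperature param
isTemperatureSupported("gpt-4o");  // true: temperature is passed through unchanged
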
@@ -349,7 +353,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
     const { messages, stream, tools, additionalChatOptions } = params;
     const baseRequestParams = <OpenAILLM.Chat.ChatCompletionCreateParams>{
       model: this.model,
-      temperature: this.temperature,
+      temperature: isTemperatureSupported(this.model) ? this.temperature : null,
       max_tokens: this.maxTokens,
       tools: tools?.map(OpenAI.toTool),
       messages: OpenAI.toOpenAIMessage(messages),
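
Taken together, a rough standalone sketch of the effect on the request params; buildBaseRequestParams is a hypothetical reproduction of the gating logic above, not library code:

// Hypothetical standalone reproduction of the temperature gating above.
function buildBaseRequestParams(model: string, temperature: number) {
  return {
    model,
    // o3 models do not accept the temperature param, so it is nulled out
    temperature: !model.startsWith("o3") ? temperature : null,
  };
}

buildBaseRequestParams("o3-mini", 0.7); // => { model: "o3-mini", temperature: null }
buildBaseRequestParams("gpt-4o", 0.7);  // => { model: "gpt-4o", temperature: 0.7 }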