From 2019a041f73ad83c08e4703199528a9b6b1482f1 Mon Sep 17 00:00:00 2001
From: Parham Saidi <parham@parha.me>
Date: Thu, 6 Feb 2025 04:23:23 +0100
Subject: [PATCH] fix: o3 calls do not support temperature param (#1622)

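The o3 family of reasoning models rejects requests that set the
temperature parameter. Add an isTemperatureSupported helper and pass
null instead of the configured temperature for these models.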
---
 .changeset/eighty-mirrors-invite.md  | 5 +++++
 packages/providers/openai/src/llm.ts | 6 +++++-
 2 files changed, 10 insertions(+), 1 deletion(-)
 create mode 100644 .changeset/eighty-mirrors-invite.md

diff --git a/.changeset/eighty-mirrors-invite.md b/.changeset/eighty-mirrors-invite.md
new file mode 100644
index 000000000..3905d01b2
--- /dev/null
+++ b/.changeset/eighty-mirrors-invite.md
@@ -0,0 +1,5 @@
+---
+"@llamaindex/openai": patch
+---
+
+fix: do not pass the temperature param for o3 models
diff --git a/packages/providers/openai/src/llm.ts b/packages/providers/openai/src/llm.ts
index f15d601b9..926d478d1 100644
--- a/packages/providers/openai/src/llm.ts
+++ b/packages/providers/openai/src/llm.ts
@@ -140,6 +140,10 @@ export function isFunctionCallingModel(llm: LLM): llm is OpenAI {
   return isChatModel && !isOld && !isO1;
 }
 
+export function isTemperatureSupported(model: ChatModel | string): boolean {
+  return !model.startsWith("o3");
+}
+
 export type OpenAIAdditionalMetadata = object;
 
 export type OpenAIAdditionalChatOptions = Omit<
@@ -349,7 +353,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
     const { messages, stream, tools, additionalChatOptions } = params;
     const baseRequestParams = <OpenAILLM.Chat.ChatCompletionCreateParams>{
       model: this.model,
-      temperature: this.temperature,
+      temperature: isTemperatureSupported(this.model) ? this.temperature : null,
       max_tokens: this.maxTokens,
       tools: tools?.map(OpenAI.toTool),
       messages: OpenAI.toOpenAIMessage(messages),
-- 
GitLab
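
For reviewers, a minimal standalone sketch of how the new guard changes the
request payload. buildBaseRequestParams is a hypothetical stand-in for the
baseRequestParams construction in the hunk above, not code from this patch:

    // Mirrors the guard added in packages/providers/openai/src/llm.ts.
    function isTemperatureSupported(model: string): boolean {
      return !model.startsWith("o3");
    }

    // Hypothetical stand-in for the patched baseRequestParams construction.
    function buildBaseRequestParams(model: string, temperature: number) {
      return {
        model,
        // o3 models reject an explicit temperature, so send null instead.
        temperature: isTemperatureSupported(model) ? temperature : null,
      };
    }

    console.log(buildBaseRequestParams("gpt-4o", 0.1));
    // { model: "gpt-4o", temperature: 0.1 }
    console.log(buildBaseRequestParams("o3-mini", 0.1));
    // { model: "o3-mini", temperature: null }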