diff --git a/.changeset/eighty-mirrors-invite.md b/.changeset/eighty-mirrors-invite.md
new file mode 100644
index 0000000000000000000000000000000000000000..3905d01b27a74803102d03cf00eb1bc487b5bc31
--- /dev/null
+++ b/.changeset/eighty-mirrors-invite.md
@@ -0,0 +1,5 @@
+---
+"@llamaindex/openai": patch
+---
+
+fix: omit unsupported `temperature` parameter for o3 models (e.g. o3-mini)
diff --git a/packages/providers/openai/src/llm.ts b/packages/providers/openai/src/llm.ts
index f15d601b99780593787ddea0eb42c54ec877c8dd..926d478d185f0e2dee37ec5a7db0586e2ac356d4 100644
--- a/packages/providers/openai/src/llm.ts
+++ b/packages/providers/openai/src/llm.ts
@@ -140,6 +140,10 @@ export function isFunctionCallingModel(llm: LLM): llm is OpenAI {
   return isChatModel && !isOld && !isO1;
 }
 
+export function isTemperatureSupported(model: ChatModel | string): boolean {
+  return !model.startsWith("o3");
+}
+
 export type OpenAIAdditionalMetadata = object;
 
 export type OpenAIAdditionalChatOptions = Omit<
@@ -349,7 +353,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
     const { messages, stream, tools, additionalChatOptions } = params;
     const baseRequestParams = <OpenAILLM.Chat.ChatCompletionCreateParams>{
       model: this.model,
-      temperature: this.temperature,
+      temperature: isTemperatureSupported(this.model) ? this.temperature : null,
       max_tokens: this.maxTokens,
       tools: tools?.map(OpenAI.toTool),
       messages: OpenAI.toOpenAIMessage(messages),