From e76baacec4a9f3bb5b9bf1f24dc0c4ba0dbaae09 Mon Sep 17 00:00:00 2001
From: Timothy Carambat <rambat1010@gmail.com>
Date: Wed, 5 Feb 2025 15:22:45 -0800
Subject: [PATCH] Add reasoning flag for Azure models with "default" fallback
 (#3128)

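Previously, reasoning ("o-series") models were detected by checking whether
the deployment name starts with "o" (e.g. "o1", "o3-mini"), which breaks
when a deployment is named arbitrarily. This patch replaces that heuristic
with an explicit "Model Type" select in the Azure LLM options UI, persisted
as AZURE_OPENAI_MODEL_TYPE and validated to be either "default" or
"reasoning", falling back to "default" when unset. When the type is
"reasoning", streaming of chat completions is disabled, since streaming is
not supported for reasoning models.
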
---
 .../LLMSelection/AzureAiOptions/index.jsx     | 15 +++++++++++++
 server/models/systemSettings.js               |  1 +
 server/utils/AiProviders/azureOpenAi/index.js | 22 +++++++++----------
 server/utils/helpers/updateENV.js             |  9 ++++++++
 4 files changed, 35 insertions(+), 12 deletions(-)
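
Note (not part of the applied diff): a minimal sketch of the resulting .env
entry when opting into reasoning behavior; the variable name and its accepted
values ("default", "reasoning") come from this patch, everything else about
the Azure configuration is unchanged:

    AZURE_OPENAI_MODEL_TYPE=reasoning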

diff --git a/frontend/src/components/LLMSelection/AzureAiOptions/index.jsx b/frontend/src/components/LLMSelection/AzureAiOptions/index.jsx
index 49e45cc6c..0274175be 100644
--- a/frontend/src/components/LLMSelection/AzureAiOptions/index.jsx
+++ b/frontend/src/components/LLMSelection/AzureAiOptions/index.jsx
@@ -71,6 +71,21 @@ export default function AzureAiOptions({ settings }) {
             </option>
           </select>
         </div>
+
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-3">
+            Model Type
+          </label>
+          <select
+            name="AzureOpenAiModelType"
+            defaultValue={settings?.AzureOpenAiModelType || "default"}
+            className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
+            required={true}
+          >
+            <option value="default">Default</option>
+            <option value="reasoning">Reasoning</option>
+          </select>
+        </div>
       </div>
     </div>
   );
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index 42ce8723b..7f7d0ea34 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -442,6 +442,7 @@ const SystemSettings = {
       AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
       AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
       AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
+      AzureOpenAiModelType: process.env.AZURE_OPENAI_MODEL_TYPE || "default",
 
       // Anthropic Keys
       AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY,
diff --git a/server/utils/AiProviders/azureOpenAi/index.js b/server/utils/AiProviders/azureOpenAi/index.js
index 6b726c88d..f15d6ecfd 100644
--- a/server/utils/AiProviders/azureOpenAi/index.js
+++ b/server/utils/AiProviders/azureOpenAi/index.js
@@ -25,6 +25,8 @@ class AzureOpenAiLLM {
       }
     );
     this.model = modelPreference ?? process.env.OPEN_MODEL_PREF;
+    this.isOTypeModel =
+      process.env.AZURE_OPENAI_MODEL_TYPE === "reasoning" || false;
     this.limits = {
       history: this.promptWindowLimit() * 0.15,
       system: this.promptWindowLimit() * 0.15,
@@ -34,20 +36,10 @@ class AzureOpenAiLLM {
     this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
     this.#log(
-      `Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens. API-Version: ${this.apiVersion}`
+      `Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens.\nAPI-Version: ${this.apiVersion}.\nModel Type: ${this.isOTypeModel ? "reasoning" : "default"}`
     );
   }
 
-  /**
-   * Check if the model is an o# type model.
-   * NOTE: This is HIGHLY dependent on if the user named their deployment "o1" or "o3-mini" or something else to match the model name.
-   * It cannot be determined by the model name alone since model deployments can be named arbitrarily.
-   * @returns {boolean}
-   */
-  get isOTypeModel() {
-    return this.model.startsWith("o");
-  }
-
   #log(text, ...args) {
     console.log(`\x1b[32m[AzureOpenAi]\x1b[0m ${text}`, ...args);
   }
@@ -65,7 +57,13 @@ class AzureOpenAiLLM {
   }
 
   streamingEnabled() {
-    if (this.isOTypeModel && this.model !== "o3-mini") return false;
+    // Streaming of reasoning models is not supported
+    if (this.isOTypeModel) {
+      this.#log(
+        "Streaming will be disabled. AZURE_OPENAI_MODEL_TYPE is set to 'reasoning'."
+      );
+      return false;
+    }
     return "streamGetChatCompletion" in this;
   }
 
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index 88e9de476..e3c1b9140 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -35,6 +35,15 @@ const KEY_MAPPING = {
     envKey: "EMBEDDING_MODEL_PREF",
     checks: [isNotEmpty],
   },
+  AzureOpenAiModelType: {
+    envKey: "AZURE_OPENAI_MODEL_TYPE",
+    checks: [
+      (input) =>
+        ["default", "reasoning"].includes(input)
+          ? null
+          : "Invalid model type. Must be one of: default, reasoning.",
+    ],
+  },
 
   // Anthropic Settings
   AnthropicApiKey: {
-- 
GitLab