Skip to content
Snippets Groups Projects
Unverified Commit e76baace authored by Timothy Carambat's avatar Timothy Carambat Committed by GitHub
Browse files

Add reasoning flag for azure models with "default" fallback (#3128)

parent 805b4f79
No related branches found
No related tags found
No related merge requests found
...@@ -71,6 +71,21 @@ export default function AzureAiOptions({ settings }) { ...@@ -71,6 +71,21 @@ export default function AzureAiOptions({ settings }) {
</option> </option>
</select> </select>
</div> </div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Model Type
</label>
<select
name="AzureOpenAiModelType"
defaultValue={settings?.AzureOpenAiModelType || "default"}
className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
required={true}
>
<option value="default">Default</option>
<option value="reasoning">Reasoning</option>
</select>
</div>
</div> </div>
</div> </div>
); );
......
...@@ -442,6 +442,7 @@ const SystemSettings = { ...@@ -442,6 +442,7 @@ const SystemSettings = {
AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF, AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF, AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096, AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
AzureOpenAiModelType: process.env.AZURE_OPENAI_MODEL_TYPE || "default",
// Anthropic Keys // Anthropic Keys
AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY, AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY,
......
...@@ -25,6 +25,8 @@ class AzureOpenAiLLM { ...@@ -25,6 +25,8 @@ class AzureOpenAiLLM {
} }
); );
this.model = modelPreference ?? process.env.OPEN_MODEL_PREF; this.model = modelPreference ?? process.env.OPEN_MODEL_PREF;
// Whether the configured Azure deployment is a reasoning ("o" series) model.
// Driven by an explicit env flag because Azure deployment names are arbitrary
// and cannot be used to infer the underlying model.
// NOTE: `===` already yields a boolean, so the previous `|| false` was dead code.
this.isOTypeModel = process.env.AZURE_OPENAI_MODEL_TYPE === "reasoning";
this.limits = { this.limits = {
history: this.promptWindowLimit() * 0.15, history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15, system: this.promptWindowLimit() * 0.15,
...@@ -34,20 +36,10 @@ class AzureOpenAiLLM { ...@@ -34,20 +36,10 @@ class AzureOpenAiLLM {
this.embedder = embedder ?? new NativeEmbedder(); this.embedder = embedder ?? new NativeEmbedder();
this.defaultTemp = 0.7; this.defaultTemp = 0.7;
this.#log( this.#log(
`Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens. API-Version: ${this.apiVersion}` `Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens.\nAPI-Version: ${this.apiVersion}.\nModel Type: ${this.isOTypeModel ? "reasoning" : "default"}`
); );
} }
/**
* Check if the model is an o# type model.
* NOTE: This is HIGHLY dependent on if the user named their deployment "o1" or "o3-mini" or something else to match the model name.
* It cannot be determined by the model name alone since model deployments can be named arbitrarily.
* @returns {boolean}
*/
get isOTypeModel() {
return this.model.startsWith("o");
}
#log(text, ...args) { #log(text, ...args) {
console.log(`\x1b[32m[AzureOpenAi]\x1b[0m ${text}`, ...args); console.log(`\x1b[32m[AzureOpenAi]\x1b[0m ${text}`, ...args);
} }
...@@ -65,7 +57,13 @@ class AzureOpenAiLLM { ...@@ -65,7 +57,13 @@ class AzureOpenAiLLM {
} }
streamingEnabled() { streamingEnabled() {
if (this.isOTypeModel && this.model !== "o3-mini") return false; // Streaming of reasoning models is not supported
if (this.isOTypeModel) {
this.#log(
"Streaming will be disabled. AZURE_OPENAI_MODEL_TYPE is set to 'reasoning'."
);
return false;
}
return "streamGetChatCompletion" in this; return "streamGetChatCompletion" in this;
} }
......
...@@ -35,6 +35,15 @@ const KEY_MAPPING = { ...@@ -35,6 +35,15 @@ const KEY_MAPPING = {
envKey: "EMBEDDING_MODEL_PREF", envKey: "EMBEDDING_MODEL_PREF",
checks: [isNotEmpty], checks: [isNotEmpty],
}, },
AzureOpenAiModelType: {
envKey: "AZURE_OPENAI_MODEL_TYPE",
checks: [
(input) =>
["default", "reasoning"].includes(input)
? null
: "Invalid model type. Must be one of: default, reasoning.",
],
},
// Anthropic Settings // Anthropic Settings
AnthropicApiKey: { AnthropicApiKey: {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment