Commit a5ee6121 authored by timothycarambat

Add patch for `o#` models on Azure

connect #3023
Note: this depends on the user naming their Azure deployment to match the model name (e.g. "o1" or "o3-mini").
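Since the Azure client is always invoked with the deployment name, the check added below can only key off that name. A minimal sketch of the behavior (the deployment names here are hypothetical):

// Sketch of the detection added in this commit. `model` holds the Azure
// *deployment* name, not the underlying model ID, so the check only
// works when the two match.
const isOTypeModel = (model) => model.startsWith("o");

isOTypeModel("o3-mini");      // true  -> patch applies
isOTypeModel("prod-o1-chat"); // false -> an o1 deployment the patch misses
isOTypeModel("omega-gpt4");   // true  -> false positive on a non-o# deployment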
@@ -38,6 +38,16 @@ class AzureOpenAiLLM {
     );
   }
 
+  /**
+   * Check if the model is an o# type model.
+   * NOTE: This is HIGHLY dependent on if the user named their deployment "o1" or "o3-mini" or something else to match the model name.
+   * It cannot be determined by the model name alone since model deployments can be named arbitrarily.
+   * @returns {boolean}
+   */
+  get isOTypeModel() {
+    return this.model.startsWith("o");
+  }
+
   #log(text, ...args) {
     console.log(`\x1b[32m[AzureOpenAi]\x1b[0m ${text}`, ...args);
   }
@@ -55,6 +65,7 @@ class AzureOpenAiLLM {
   }
 
   streamingEnabled() {
+    if (this.isOTypeModel && this.model !== "o3-mini") return false;
     return "streamGetChatCompletion" in this;
   }
 
@@ -110,7 +121,7 @@ class AzureOpenAiLLM {
     attachments = [], // This is the specific attachment for only this prompt
   }) {
     const prompt = {
-      role: "system",
+      role: this.isOTypeModel ? "user" : "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
     return [
@@ -131,7 +142,7 @@ class AzureOpenAiLLM {
 
     const result = await LLMPerformanceMonitor.measureAsyncFunction(
       this.openai.getChatCompletions(this.model, messages, {
-        temperature,
+        ...(this.isOTypeModel ? {} : { temperature }),
       })
     );
 
@@ -161,7 +172,7 @@ class AzureOpenAiLLM {
 
     const measuredStreamRequest = await LLMPerformanceMonitor.measureStream(
       await this.openai.streamChatCompletions(this.model, messages, {
-        temperature,
+        ...(this.isOTypeModel ? {} : { temperature }),
         n: 1,
       }),
       messages
...
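For reference, the conditional spread used in both completion calls drops the `temperature` key entirely for o# models (which do not accept a non-default temperature) instead of sending it with a placeholder value. An illustrative reduction of that pattern:

// Illustrative only: how the conditional spread includes or omits a key.
const buildOptions = (isOTypeModel, temperature) => ({
  ...(isOTypeModel ? {} : { temperature }),
  n: 1,
});

buildOptions(true, 0.7);  // { n: 1 }                   -> no temperature sent
buildOptions(false, 0.7); // { temperature: 0.7, n: 1 } -> behavior unchanged

The role swap from "system" to "user" in constructPrompt follows the same reasoning: early o# models did not accept the "system" role, so the system prompt is delivered as the first user turn instead.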