Commit a4d5b258 authored by timothycarambat

o3 model patch

parent 121fbea2
@@ -24,6 +24,8 @@ function supportedModel(provider, model = "") {
       "o1-preview-2024-09-12",
       "o1-mini",
       "o1-mini-2024-09-12",
+      "o3-mini",
+      "o3-mini-2025-01-31",
     ].includes(model) === false
   );
 }
@@ -66,10 +66,13 @@ const MODEL_MAP = {
     "o1-preview-2024-09-12": 128_000,
     "o1-mini": 128_000,
     "o1-mini-2024-09-12": 128_000,
+    "o3-mini": 200_000,
+    "o3-mini-2025-01-31": 200_000,
   },
   deepseek: {
     "deepseek-chat": 128_000,
     "deepseek-coder": 128_000,
+    "deepseek-reasoner": 128_000,
   },
   xai: {
     "grok-beta": 131_072,
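For reference, MODEL_MAP keys a provider name to per-model context-window sizes in tokens. A minimal sketch of how such a table is typically consulted; the helper name contextWindowFor and the 4_096 fallback are illustrative assumptions, not code from this commit:

// Hypothetical helper: look up a model's context window, fall back if unknown.
function contextWindowFor(provider, model, fallback = 4_096) {
  return MODEL_MAP?.[provider]?.[model] ?? fallback;
}

contextWindowFor("openai", "o3-mini");             // 200_000
contextWindowFor("deepseek", "deepseek-reasoner"); // 128_000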
@@ -31,8 +31,8 @@ class OpenAiLLM {
    * Check if the model is an o1 model.
    * @returns {boolean}
    */
-  get isO1Model() {
-    return this.model.startsWith("o1");
+  get isOTypeModel() {
+    return this.model.startsWith("o");
   }
 
   #appendContext(contextTexts = []) {
@@ -48,7 +48,8 @@ class OpenAiLLM {
   }
 
   streamingEnabled() {
-    if (this.isO1Model) return false;
+    // o3-mini is the only o-type model that supports streaming
+    if (this.isOTypeModel && this.model !== "o3-mini") return false;
     return "streamGetChatCompletion" in this;
   }
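The new gate can be read in isolation. A small sketch of the same predicate, with a plain model string standing in for this.model (assumption: streaming support is decided purely by these string checks, as in the hunk above):

// true when streaming is allowed under the gate above
const canStream = (model) =>
  !(model.startsWith("o") && model !== "o3-mini");

canStream("gpt-4o");  // true  -- "gpt-4o" does not start with "o"
canStream("o1-mini"); // false -- o-type, and not the o3-mini exception
canStream("o3-mini"); // true  -- the one o-type model allowed to stream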
@@ -68,7 +69,7 @@ class OpenAiLLM {
   async isValidChatCompletionModel(modelName = "") {
     const isPreset =
       modelName.toLowerCase().includes("gpt") ||
-      modelName.toLowerCase().includes("o1");
+      modelName.toLowerCase().startsWith("o");
     if (isPreset) return true;
 
     const model = await this.openai.models
@@ -117,7 +118,7 @@ class OpenAiLLM {
     // in order to combat this, we can use the "user" role as a replacement for now
     // https://community.openai.com/t/o1-models-do-not-support-system-role-in-chat-completion/953880
     const prompt = {
-      role: this.isO1Model ? "user" : "system",
+      role: this.isOTypeModel ? "user" : "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
     return [
@@ -141,7 +142,7 @@ class OpenAiLLM {
       .create({
         model: this.model,
         messages,
-        temperature: this.isO1Model ? 1 : temperature, // o1 models only accept temperature 1
+        temperature: this.isOTypeModel ? 1 : temperature, // o1 models only accept temperature 1
       })
       .catch((e) => {
         throw new Error(e.message);
@@ -177,7 +178,7 @@ class OpenAiLLM {
         model: this.model,
         stream: true,
         messages,
-        temperature: this.isO1Model ? 1 : temperature, // o1 models only accept temperature 1
+        temperature: this.isOTypeModel ? 1 : temperature, // o1 models only accept temperature 1
       }),
       messages
       // runPromptTokenCalculation: true - We manually count the tokens because OpenAI does not provide them in the stream
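Both temperature call sites above reduce to the same clamp. A sketch under the assumption stated in the inline comment (o-type models reject any temperature other than 1):

// Hypothetical standalone form of the ternary used in both requests above.
const effectiveTemperature = (isOTypeModel, temperature) =>
  isOTypeModel ? 1 : temperature;

effectiveTemperature(true, 0.7);  // 1   (e.g. "o3-mini")
effectiveTemperature(false, 0.7); // 0.7 (e.g. "gpt-4o")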
@@ -145,7 +145,7 @@ async function openAiModels(apiKey = null) {
     .filter(
       (model) =>
         (model.id.includes("gpt") && !model.id.startsWith("ft:")) ||
-        model.id.includes("o1")
+        model.id.startsWith("o") // o1, o1-mini, o3, etc
     )
     .filter(
       (model) =>
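The updated predicate keeps GPT models (excluding fine-tunes) plus any id beginning with "o". A standalone sketch with sample ids; the sample list is illustrative, not output from the OpenAI API:

const keep = (id) =>
  (id.includes("gpt") && !id.startsWith("ft:")) || id.startsWith("o");

["gpt-4o", "ft:gpt-4o:acme", "o1-mini", "o3-mini", "whisper-1"].filter(keep);
// => ["gpt-4o", "o1-mini", "o3-mini"]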