From 7e1b96a2db6cf100ca429145c30bdd39cf3ec283 Mon Sep 17 00:00:00 2001
From: Marcus Schiesser <mail@marcusschiesser.de>
Date: Fri, 24 May 2024 23:15:09 +0800
Subject: [PATCH] fix: default to Settings.llm (#885)

---
 packages/core/src/engines/chat/ContextChatEngine.ts | 5 ++---
 packages/core/src/engines/chat/SimpleChatEngine.ts  | 4 ++--
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/packages/core/src/engines/chat/ContextChatEngine.ts b/packages/core/src/engines/chat/ContextChatEngine.ts
index 9659a1f2f..feeaef0f0 100644
--- a/packages/core/src/engines/chat/ContextChatEngine.ts
+++ b/packages/core/src/engines/chat/ContextChatEngine.ts
@@ -3,9 +3,9 @@ import { getHistory } from "../../ChatHistory.js";
 import type { ContextSystemPrompt } from "../../Prompt.js";
 import { Response } from "../../Response.js";
 import type { BaseRetriever } from "../../Retriever.js";
+import { Settings } from "../../Settings.js";
 import { wrapEventCaller } from "../../internal/context/EventCaller.js";
 import type { ChatMessage, ChatResponseChunk, LLM } from "../../llm/index.js";
-import { OpenAI } from "../../llm/index.js";
 import type { MessageContent } from "../../llm/types.js";
 import {
   extractText,
@@ -42,8 +42,7 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
     systemPrompt?: string;
   }) {
     super();
-    this.chatModel =
-      init.chatModel ?? new OpenAI({ model: "gpt-3.5-turbo-16k" });
+    this.chatModel = init.chatModel ?? Settings.llm;
     this.chatHistory = getHistory(init?.chatHistory);
     this.contextGenerator = new DefaultContextGenerator({
       retriever: init.retriever,
diff --git a/packages/core/src/engines/chat/SimpleChatEngine.ts b/packages/core/src/engines/chat/SimpleChatEngine.ts
index e57ce7fa9..dd6ab0a6d 100644
--- a/packages/core/src/engines/chat/SimpleChatEngine.ts
+++ b/packages/core/src/engines/chat/SimpleChatEngine.ts
@@ -1,9 +1,9 @@
 import type { ChatHistory } from "../../ChatHistory.js";
 import { getHistory } from "../../ChatHistory.js";
 import { Response } from "../../Response.js";
+import { Settings } from "../../Settings.js";
 import { wrapEventCaller } from "../../internal/context/EventCaller.js";
 import type { ChatResponseChunk, LLM } from "../../llm/index.js";
-import { OpenAI } from "../../llm/index.js";
 import {
   extractText,
   streamConverter,
@@ -25,7 +25,7 @@ export class SimpleChatEngine implements ChatEngine {

   constructor(init?: Partial<SimpleChatEngine>) {
     this.chatHistory = getHistory(init?.chatHistory);
-    this.llm = init?.llm ?? new OpenAI();
+    this.llm = init?.llm ?? Settings.llm;
   }

   chat(params: ChatEngineParamsStreaming): Promise<AsyncIterable<Response>>;
-- 
GitLab