Skip to content
Snippets Groups Projects
Unverified Commit 7e1b96a2 authored by Marcus Schiesser's avatar Marcus Schiesser Committed by GitHub
Browse files

fix: default to Settings.llm (#885)

parent 8e26f753
No related branches found
No related tags found
No related merge requests found
...@@ -3,9 +3,9 @@ import { getHistory } from "../../ChatHistory.js"; ...@@ -3,9 +3,9 @@ import { getHistory } from "../../ChatHistory.js";
import type { ContextSystemPrompt } from "../../Prompt.js"; import type { ContextSystemPrompt } from "../../Prompt.js";
import { Response } from "../../Response.js"; import { Response } from "../../Response.js";
import type { BaseRetriever } from "../../Retriever.js"; import type { BaseRetriever } from "../../Retriever.js";
import { Settings } from "../../Settings.js";
import { wrapEventCaller } from "../../internal/context/EventCaller.js"; import { wrapEventCaller } from "../../internal/context/EventCaller.js";
import type { ChatMessage, ChatResponseChunk, LLM } from "../../llm/index.js"; import type { ChatMessage, ChatResponseChunk, LLM } from "../../llm/index.js";
import { OpenAI } from "../../llm/index.js";
import type { MessageContent } from "../../llm/types.js"; import type { MessageContent } from "../../llm/types.js";
import { import {
extractText, extractText,
...@@ -42,8 +42,7 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine { ...@@ -42,8 +42,7 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
systemPrompt?: string; systemPrompt?: string;
}) { }) {
super(); super();
this.chatModel = this.chatModel = init.chatModel ?? Settings.llm;
init.chatModel ?? new OpenAI({ model: "gpt-3.5-turbo-16k" });
this.chatHistory = getHistory(init?.chatHistory); this.chatHistory = getHistory(init?.chatHistory);
this.contextGenerator = new DefaultContextGenerator({ this.contextGenerator = new DefaultContextGenerator({
retriever: init.retriever, retriever: init.retriever,
......
import type { ChatHistory } from "../../ChatHistory.js"; import type { ChatHistory } from "../../ChatHistory.js";
import { getHistory } from "../../ChatHistory.js"; import { getHistory } from "../../ChatHistory.js";
import { Response } from "../../Response.js"; import { Response } from "../../Response.js";
import { Settings } from "../../Settings.js";
import { wrapEventCaller } from "../../internal/context/EventCaller.js"; import { wrapEventCaller } from "../../internal/context/EventCaller.js";
import type { ChatResponseChunk, LLM } from "../../llm/index.js"; import type { ChatResponseChunk, LLM } from "../../llm/index.js";
import { OpenAI } from "../../llm/index.js";
import { import {
extractText, extractText,
streamConverter, streamConverter,
...@@ -25,7 +25,7 @@ export class SimpleChatEngine implements ChatEngine { ...@@ -25,7 +25,7 @@ export class SimpleChatEngine implements ChatEngine {
constructor(init?: Partial<SimpleChatEngine>) { constructor(init?: Partial<SimpleChatEngine>) {
this.chatHistory = getHistory(init?.chatHistory); this.chatHistory = getHistory(init?.chatHistory);
this.llm = init?.llm ?? new OpenAI(); this.llm = init?.llm ?? Settings.llm;
} }
chat(params: ChatEngineParamsStreaming): Promise<AsyncIterable<Response>>; chat(params: ChatEngineParamsStreaming): Promise<AsyncIterable<Response>>;
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.