Skip to content
Snippets Groups Projects
Commit 204b8f53 authored by Yi Ding's avatar Yi Ding
Browse files

chatengine

parent 71dd461a
Branches
Tags
No related merge requests found
import {
BaseChatModel,
BaseMessage,
ChatOpenAI,
LLMResult,
} from "./LanguageModel";
import {
SimplePrompt,
defaultCondenseQuestionPrompt,
messagesToHistoryStr,
} from "./Prompt";
import { BaseQueryEngine } from "./QueryEngine";
import { Response } from "./Response";
import { ServiceContext, serviceContextFromDefaults } from "./ServiceContext";
/**
 * Common contract for conversational engines: an interactive REPL entry point
 * and an async chat call that threads an optional message history through.
 */
interface ChatEngine {
  // Interactive chat loop; both implementations below currently stub this out.
  chatRepl(): void;
  // Sends `message`; when `chatHistory` is omitted, implementations fall back
  // to their own stored history (see SimpleChatEngine / CondenseQuestionChatEngine).
  achat(message: string, chatHistory?: BaseMessage[]): Promise<Response>;
}
/**
 * Minimal chat engine: forwards the running message history straight to the
 * underlying chat model and appends both sides of each exchange to it.
 */
export class SimpleChatEngine implements ChatEngine {
  chatHistory: BaseMessage[];
  llm: BaseChatModel;

  constructor(init?: Partial<SimpleChatEngine>) {
    this.chatHistory = init?.chatHistory ?? [];
    this.llm = init?.llm ?? new ChatOpenAI();
  }

  chatRepl() {
    throw new Error("Method not implemented.");
  }

  /**
   * Appends `message` as a human turn, asks the LLM for a completion over the
   * whole history, records the reply as an AI turn, and returns it.
   * When `chatHistory` is provided it is used (and mutated) in place of the
   * stored history, then adopted as the engine's history.
   */
  async achat(message: string, chatHistory?: BaseMessage[]): Promise<Response> {
    const history = chatHistory ?? this.chatHistory;
    history.push({ content: message, type: "human" });

    const result = await this.llm.agenerate(history);
    // First generation of the first (only) input is the reply text.
    const reply = result.generations[0][0].text;

    history.push({ content: reply, type: "ai" });
    this.chatHistory = history;
    return new Response(reply);
  }

  /** Clears the stored conversation history. */
  reset() {
    this.chatHistory = [];
  }
}
/**
 * Chat engine that condenses the conversation plus a follow-up message into a
 * single standalone question, then answers it with the wrapped query engine.
 */
export class CondenseQuestionChatEngine implements ChatEngine {
  queryEngine: BaseQueryEngine;
  chatHistory: BaseMessage[];
  serviceContext: ServiceContext;
  condenseMessagePrompt: SimplePrompt;

  constructor(init: {
    queryEngine: BaseQueryEngine;
    chatHistory: BaseMessage[];
    serviceContext?: ServiceContext;
    condenseMessagePrompt?: SimplePrompt;
  }) {
    this.queryEngine = init.queryEngine;
    this.chatHistory = init?.chatHistory ?? [];
    this.serviceContext =
      init?.serviceContext ?? serviceContextFromDefaults({});
    this.condenseMessagePrompt =
      init?.condenseMessagePrompt ?? defaultCondenseQuestionPrompt;
  }

  /**
   * Rewrites `question` into a standalone question using the LLM predictor and
   * the configured condense prompt.
   */
  private async acondenseQuestion(
    chatHistory: BaseMessage[],
    question: string
  ) {
    const chatHistoryStr = messagesToHistoryStr(chatHistory);
    // Fix: use the configurable prompt from the constructor instead of the
    // hard-coded defaultCondenseQuestionPrompt, and pass the history under the
    // `chatHistory` key that defaultCondenseQuestionPrompt destructures
    // (previously `chat_history`, which rendered as `undefined`).
    return this.serviceContext.llmPredictor.apredict(
      this.condenseMessagePrompt,
      {
        question: question,
        chatHistory: chatHistoryStr,
      }
    );
  }

  /**
   * Condenses the history + `message` into one question, queries the wrapped
   * engine with it, and appends both turns to the history used.
   */
  async achat(
    message: string,
    chatHistory?: BaseMessage[] | undefined
  ): Promise<Response> {
    chatHistory = chatHistory ?? this.chatHistory;
    const condensedQuestion = await this.acondenseQuestion(
      chatHistory,
      message
    );
    const response = await this.queryEngine.aquery(condensedQuestion);
    chatHistory.push({ content: message, type: "human" });
    chatHistory.push({ content: response.response, type: "ai" });
    return response;
  }

  chatRepl() {
    throw new Error("Method not implemented.");
  }

  /** Clears the stored conversation history. */
  reset() {
    this.chatHistory = [];
  }
}
...@@ -10,7 +10,7 @@ export interface BaseLanguageModel {} ...@@ -10,7 +10,7 @@ export interface BaseLanguageModel {}
type MessageType = "human" | "ai" | "system" | "generic" | "function"; type MessageType = "human" | "ai" | "system" | "generic" | "function";
interface BaseMessage { export interface BaseMessage {
content: string; content: string;
type: MessageType; type: MessageType;
} }
...@@ -24,9 +24,11 @@ export interface LLMResult { ...@@ -24,9 +24,11 @@ export interface LLMResult {
generations: Generation[][]; // Each input can have more than one generations generations: Generation[][]; // Each input can have more than one generations
} }
export class BaseChatModel implements BaseLanguageModel {} export interface BaseChatModel extends BaseLanguageModel {
agenerate(messages: BaseMessage[]): Promise<LLMResult>;
}
export class ChatOpenAI extends BaseChatModel { export class ChatOpenAI implements BaseChatModel {
model: string; model: string;
temperature: number = 0.7; temperature: number = 0.7;
openAIKey: string | null = null; openAIKey: string | null = null;
...@@ -38,7 +40,6 @@ export class ChatOpenAI extends BaseChatModel { ...@@ -38,7 +40,6 @@ export class ChatOpenAI extends BaseChatModel {
session: OpenAISession; session: OpenAISession;
constructor(model: string = "gpt-3.5-turbo") { constructor(model: string = "gpt-3.5-turbo") {
super();
this.model = model; this.model = model;
this.session = getOpenAISession(); this.session = getOpenAISession();
} }
......
import { BaseMessage } from "./LanguageModel";
import { SubQuestion } from "./QuestionGenerator"; import { SubQuestion } from "./QuestionGenerator";
import { ToolMetadata } from "./Tool"; import { ToolMetadata } from "./Tool";
...@@ -266,3 +267,44 @@ ${queryStr} ...@@ -266,3 +267,44 @@ ${queryStr}
<Output> <Output>
`; `;
}; };
// DEFAULT_TEMPLATE = """\
// Given a conversation (between Human and Assistant) and a follow up message from Human, \
// rewrite the message to be a standalone question that captures all relevant context \
// from the conversation.
// <Chat History>
// {chat_history}
// <Follow Up Message>
// {question}
// <Standalone question>
// """
/**
 * Default prompt for rewriting a follow-up message into a standalone question.
 * Expects `input.chatHistory` (a rendered history string, see
 * messagesToHistoryStr) and `input.question` (the follow-up message).
 */
export const defaultCondenseQuestionPrompt: SimplePrompt = (input) => {
  return `Given a conversation (between Human and Assistant) and a follow up message from Human, rewrite the message to be a standalone question that captures all relevant context from the conversation.
<Chat History>
${input.chatHistory}
<Follow Up Message>
${input.question}
<Standalone question>
`;
};
/**
 * Renders a message list as a newline-separated transcript, prefixing human
 * turns with "Human: " and every other message type with "Assistant: ".
 * Returns an empty string for an empty list.
 */
export function messagesToHistoryStr(messages: BaseMessage[]) {
  return messages
    .map((message) =>
      message.type === "human"
        ? `Human: ${message.content}`
        : `Assistant: ${message.content}`
    )
    .join("\n");
}
import { BaseNode } from "./Node"; import { BaseNode } from "./Node";
export class Response { export class Response {
response?: string; response: string;
sourceNodes: BaseNode[]; sourceNodes?: BaseNode[];
constructor(response?: string, sourceNodes?: BaseNode[]) { constructor(response: string, sourceNodes?: BaseNode[]) {
this.response = response; this.response = response;
this.sourceNodes = sourceNodes || []; this.sourceNodes = sourceNodes || [];
} }
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment