Skip to content
Snippets Groups Projects
Unverified Commit aa0f5863 authored by Thuc Pham's avatar Thuc Pham Committed by GitHub
Browse files

feat: allow adding system prompt to chat engine (#855)

parent ff031397
No related branches found
No related tags found
No related merge requests found
...@@ -31,6 +31,7 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine { ...@@ -31,6 +31,7 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
chatModel: LLM; chatModel: LLM;
chatHistory: ChatHistory; chatHistory: ChatHistory;
contextGenerator: ContextGenerator; contextGenerator: ContextGenerator;
systemPrompt?: string;
constructor(init: { constructor(init: {
retriever: BaseRetriever; retriever: BaseRetriever;
...@@ -38,9 +39,9 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine { ...@@ -38,9 +39,9 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
chatHistory?: ChatMessage[]; chatHistory?: ChatMessage[];
contextSystemPrompt?: ContextSystemPrompt; contextSystemPrompt?: ContextSystemPrompt;
nodePostprocessors?: BaseNodePostprocessor[]; nodePostprocessors?: BaseNodePostprocessor[];
systemPrompt?: string;
}) { }) {
super(); super();
this.chatModel = this.chatModel =
init.chatModel ?? new OpenAI({ model: "gpt-3.5-turbo-16k" }); init.chatModel ?? new OpenAI({ model: "gpt-3.5-turbo-16k" });
this.chatHistory = getHistory(init?.chatHistory); this.chatHistory = getHistory(init?.chatHistory);
...@@ -49,6 +50,7 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine { ...@@ -49,6 +50,7 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
contextSystemPrompt: init?.contextSystemPrompt, contextSystemPrompt: init?.contextSystemPrompt,
nodePostprocessors: init?.nodePostprocessors, nodePostprocessors: init?.nodePostprocessors,
}); });
this.systemPrompt = init.systemPrompt;
} }
protected _getPromptModules(): Record<string, ContextGenerator> { protected _getPromptModules(): Record<string, ContextGenerator> {
...@@ -71,7 +73,6 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine { ...@@ -71,7 +73,6 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
message, message,
chatHistory, chatHistory,
); );
if (stream) { if (stream) {
const stream = await this.chatModel.chat({ const stream = await this.chatModel.chat({
messages: requestMessages.messages, messages: requestMessages.messages,
...@@ -113,9 +114,16 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine { ...@@ -113,9 +114,16 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
}); });
const textOnly = extractText(message); const textOnly = extractText(message);
const context = await this.contextGenerator.generate(textOnly); const context = await this.contextGenerator.generate(textOnly);
const messages = await chatHistory.requestMessages( const systemMessage = this.prependSystemPrompt(context.message);
context ? [context.message] : undefined, const messages = await chatHistory.requestMessages([systemMessage]);
);
return { nodes: context.nodes, messages }; return { nodes: context.nodes, messages };
} }
/**
 * Returns a copy of `message` whose content is prefixed with the configured
 * system prompt (trimmed, separated by a newline). When no system prompt is
 * set, the original message is returned unchanged. The input is not mutated.
 */
private prependSystemPrompt(message: ChatMessage): ChatMessage {
  const prompt = this.systemPrompt;
  if (!prompt) {
    return message;
  }
  return {
    ...message,
    content: prompt.trim() + "\n" + message.content,
  };
}
} }
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment