Commit b5b316af authored by thucpn

use chat history as the input to the start event

parent d758c87a
-import type { Message } from "ai";
 import express from "express";
 import type { ChatMessage } from "llamaindex";
-import { getUserMessageContent, pipeExpressResponse } from "./helper";
+import { pipeExpressResponse } from "./helper";
 import { chatWithWorkflow } from "./workflow/stream";
 import type { ServerWorkflow } from "./workflow/type";
 export interface LlamaIndexServerParams {
   workflow: ServerWorkflow;
   port?: number;
-  callbacks?: {
-    beforeChat?: (messages: ChatMessage[]) => void;
-  };
 }

 export class LlamaIndexServer {
   app: express.Application;
   workflow: ServerWorkflow;
   port: number;
-  callbacks?: LlamaIndexServerParams["callbacks"];

   constructor({ workflow, port = 3000 }: LlamaIndexServerParams) {
     this.app = express();
@@ -31,13 +26,8 @@ export class LlamaIndexServer {
     res: express.Response,
   ) => {
     try {
-      const { messages } = req.body as { messages: Message[] };
-      const userMessageContent = getUserMessageContent(messages);
-      this.callbacks?.beforeChat?.(messages as ChatMessage[]);
-      const streamResponse = await chatWithWorkflow(
-        userMessageContent,
-        this.workflow,
-      );
+      const { messages } = req.body as { messages: ChatMessage[] };
+      const streamResponse = await chatWithWorkflow(this.workflow, messages);
       await pipeExpressResponse(res, streamResponse);
     } catch (error) {
       console.error("Chat error:", error);
......
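
After this change, the HTTP handler forwards the entire chat history to the workflow instead of extracting a single user message server-side. A minimal client sketch of the new request shape; the route path is not visible in this diff, so "/api/chat" is an assumption (port 3000 is the constructor default above):

const response = await fetch("http://localhost:3000/api/chat", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    // The body now carries the whole history; the workflow, not the server,
    // decides which messages matter.
    messages: [
      { role: "user", content: "What is this server for?" },
      { role: "assistant", content: "It streams workflow output over HTTP." },
      { role: "user", content: "Show me how chat history is used." },
    ],
  }),
});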
@@ -5,16 +5,17 @@ import {
   Workflow,
   WorkflowContext,
   WorkflowEvent,
+  type ChatMessage,
   type ChatResponseChunk,
 } from "llamaindex";
 import { ReadableStream } from "stream/web";
 import { AgentRunEvent, type AgentInput } from "./type";

 export async function chatWithWorkflow(
-  message: string,
   workflow: Workflow<null, AgentInput, ChatResponseChunk>,
+  messages: ChatMessage[],
 ): Promise<Response> {
-  const context = workflow.run({ message });
+  const context = workflow.run({ messages });
   const { stream, dataStream } = await createStreamFromWorkflowContext(context);
   const response = LlamaIndexAdapter.toDataStreamResponse(stream, {
     data: dataStream,
......
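
chatWithWorkflow now takes the workflow first and the full history second, replacing the old (message, workflow) order. A sketch of a call site under the new signature; the concrete workflow is not part of this commit, so it is only declared here:

import {
  Workflow,
  type ChatMessage,
  type ChatResponseChunk,
} from "llamaindex";
import { chatWithWorkflow } from "./workflow/stream";
import type { AgentInput } from "./workflow/type";

// Any workflow matching the server's generic shape will do.
declare const myWorkflow: Workflow<null, AgentInput, ChatResponseChunk>;

// Full history, oldest first, exactly as the HTTP handler forwards it.
const messages: ChatMessage[] = [
  { role: "user", content: "Hello!" },
  { role: "assistant", content: "Hi! How can I help?" },
  { role: "user", content: "Summarize our chat so far." },
];
const response = await chatWithWorkflow(myWorkflow, messages);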
 import {
   Workflow,
   WorkflowEvent,
+  type ChatMessage,
   type ChatResponseChunk,
-  type MessageContent,
 } from "llamaindex";

 export type AgentInput = {
-  message: MessageContent;
-  streaming?: boolean;
+  messages: ChatMessage[];
 };

 export type AgentRunEventType = "text" | "progress";
......
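
With AgentInput reduced to { messages }, a workflow's start event now receives the whole conversation. Below is a simplified echo workflow consuming that shape; it assumes the addStep({ inputs, outputs }, handler) style of the llamaindex workflow API this package builds on, uses a plain string stop event for brevity, and lastUserMessage is a hypothetical helper standing in for the removed getUserMessageContent:

import {
  StartEvent,
  StopEvent,
  Workflow,
  type ChatMessage,
} from "llamaindex";
import type { AgentInput } from "./workflow/type";

// Hypothetical helper: recover the latest user turn from the history,
// roughly what getUserMessageContent did before this commit removed it.
function lastUserMessage(messages: ChatMessage[]): ChatMessage | undefined {
  return [...messages].reverse().find((m) => m.role === "user");
}

const workflow = new Workflow<null, AgentInput, string>();
workflow.addStep(
  { inputs: [StartEvent<AgentInput>], outputs: [StopEvent<string>] },
  async (_context, event) => {
    // event.data is AgentInput: the full chat history, not a single message.
    const { messages } = event.data;
    const latest = lastUserMessage(messages);
    return new StopEvent(`You said: ${JSON.stringify(latest?.content ?? "")}`);
  },
);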