From b5b316afa48f3ebcf36c269e72337e2eb5712edc Mon Sep 17 00:00:00 2001
From: thucpn <thucsh2@gmail.com>
Date: Tue, 18 Mar 2025 11:58:27 +0700
Subject: [PATCH] use chat history as input for the start event

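Previously, chatWithWorkflow received only the content of the latest
user message, extracted via getUserMessageContent, so workflows had no
access to earlier turns of the conversation. This patch passes the
full chat history to the workflow's start event instead:

- AgentInput now carries `messages: ChatMessage[]` in place of
  `message: MessageContent`; the optional `streaming` flag is dropped.
- chatWithWorkflow now takes the workflow first and the message list
  second, and starts the run with `workflow.run({ messages })`.
- The `beforeChat` callback is removed from LlamaIndexServerParams;
  message inspection can happen inside the workflow itself, which now
  sees the whole history.

A workflow step can then read the history from the start event. A
minimal sketch, assuming StartEvent exposes the run input on `.data`
(the handler name `handleStart` is illustrative, not part of this
patch):

    import { StartEvent } from "llamaindex";
    import type { AgentInput } from "./type";

    const handleStart = async (ev: StartEvent<AgentInput>) => {
      // Full chat history, not just the latest user message content
      const { messages } = ev.data;
      const latest = messages[messages.length - 1];
      console.log("history:", messages.length, "last role:", latest.role);
      // ... generate a response using the whole conversation ...
    };
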
---
 packages/server/src/server.ts          | 16 +++-------------
 packages/server/src/workflow/stream.ts |  5 +++--
 packages/server/src/workflow/type.ts   |  5 ++---
 3 files changed, 8 insertions(+), 18 deletions(-)

diff --git a/packages/server/src/server.ts b/packages/server/src/server.ts
index 128f8ef95..338a17ad6 100644
--- a/packages/server/src/server.ts
+++ b/packages/server/src/server.ts
@@ -1,23 +1,18 @@
-import type { Message } from "ai";
 import express from "express";
 import type { ChatMessage } from "llamaindex";
-import { getUserMessageContent, pipeExpressResponse } from "./helper";
+import { pipeExpressResponse } from "./helper";
 import { chatWithWorkflow } from "./workflow/stream";
 import type { ServerWorkflow } from "./workflow/type";
 
 export interface LlamaIndexServerParams {
   workflow: ServerWorkflow;
   port?: number;
-  callbacks?: {
-    beforeChat?: (messages: ChatMessage[]) => void;
-  };
 }
 
 export class LlamaIndexServer {
   app: express.Application;
   workflow: ServerWorkflow;
   port: number;
-  callbacks?: LlamaIndexServerParams["callbacks"];
 
   constructor({ workflow, port = 3000 }: LlamaIndexServerParams) {
     this.app = express();
@@ -31,13 +26,8 @@ export class LlamaIndexServer {
     res: express.Response,
   ) => {
     try {
-      const { messages } = req.body as { messages: Message[] };
-      const userMessageContent = getUserMessageContent(messages);
-      this.callbacks?.beforeChat?.(messages as ChatMessage[]);
-      const streamResponse = await chatWithWorkflow(
-        userMessageContent,
-        this.workflow,
-      );
+      const { messages } = req.body as { messages: ChatMessage[] };
+      const streamResponse = await chatWithWorkflow(this.workflow, messages);
       await pipeExpressResponse(res, streamResponse);
     } catch (error) {
       console.error("Chat error:", error);
diff --git a/packages/server/src/workflow/stream.ts b/packages/server/src/workflow/stream.ts
index 98a0de446..6996d8d46 100644
--- a/packages/server/src/workflow/stream.ts
+++ b/packages/server/src/workflow/stream.ts
@@ -5,16 +5,17 @@ import {
   Workflow,
   WorkflowContext,
   WorkflowEvent,
+  type ChatMessage,
   type ChatResponseChunk,
 } from "llamaindex";
 import { ReadableStream } from "stream/web";
 import { AgentRunEvent, type AgentInput } from "./type";
 
 export async function chatWithWorkflow(
-  message: string,
   workflow: Workflow<null, AgentInput, ChatResponseChunk>,
+  messages: ChatMessage[],
 ): Promise<Response> {
-  const context = workflow.run({ message });
+  const context = workflow.run({ messages });
   const { stream, dataStream } = await createStreamFromWorkflowContext(context);
   const response = LlamaIndexAdapter.toDataStreamResponse(stream, {
     data: dataStream,
diff --git a/packages/server/src/workflow/type.ts b/packages/server/src/workflow/type.ts
index c6141539f..3f08d1f31 100644
--- a/packages/server/src/workflow/type.ts
+++ b/packages/server/src/workflow/type.ts
@@ -1,13 +1,12 @@
 import {
   Workflow,
   WorkflowEvent,
+  type ChatMessage,
   type ChatResponseChunk,
-  type MessageContent,
 } from "llamaindex";
 
 export type AgentInput = {
-  message: MessageContent;
-  streaming?: boolean;
+  messages: ChatMessage[];
 };
 
 export type AgentRunEventType = "text" | "progress";
-- 
GitLab