diff --git a/packages/server/src/server.ts b/packages/server/src/server.ts
index 128f8ef95ceeb6ae8ce12db6c18e364182d5a2b0..338a17ad610004892ae6116c9812dec3bfc74036 100644
--- a/packages/server/src/server.ts
+++ b/packages/server/src/server.ts
@@ -1,23 +1,18 @@
-import type { Message } from "ai";
 import express from "express";
 import type { ChatMessage } from "llamaindex";
-import { getUserMessageContent, pipeExpressResponse } from "./helper";
+import { pipeExpressResponse } from "./helper";
 import { chatWithWorkflow } from "./workflow/stream";
 import type { ServerWorkflow } from "./workflow/type";
 
 export interface LlamaIndexServerParams {
   workflow: ServerWorkflow;
   port?: number;
-  callbacks?: {
-    beforeChat?: (messages: ChatMessage[]) => void;
-  };
 }
 
 export class LlamaIndexServer {
   app: express.Application;
   workflow: ServerWorkflow;
   port: number;
-  callbacks?: LlamaIndexServerParams["callbacks"];
 
   constructor({ workflow, port = 3000 }: LlamaIndexServerParams) {
     this.app = express();
@@ -31,13 +26,8 @@ export class LlamaIndexServer {
     res: express.Response,
   ) => {
     try {
-      const { messages } = req.body as { messages: Message[] };
-      const userMessageContent = getUserMessageContent(messages);
-      this.callbacks?.beforeChat?.(messages as ChatMessage[]);
-      const streamResponse = await chatWithWorkflow(
-        userMessageContent,
-        this.workflow,
-      );
+      const { messages } = req.body as { messages: ChatMessage[] };
+      const streamResponse = await chatWithWorkflow(this.workflow, messages);
       await pipeExpressResponse(res, streamResponse);
     } catch (error) {
       console.error("Chat error:", error);
diff --git a/packages/server/src/workflow/stream.ts b/packages/server/src/workflow/stream.ts
index 98a0de4464405abdb45d139ea37598c696f14404..6996d8d467eb75d3cf92258ef8e80f81abbdc869 100644
--- a/packages/server/src/workflow/stream.ts
+++ b/packages/server/src/workflow/stream.ts
@@ -5,16 +5,17 @@ import {
   Workflow,
   WorkflowContext,
   WorkflowEvent,
+  type ChatMessage,
   type ChatResponseChunk,
 } from "llamaindex";
 import { ReadableStream } from "stream/web";
 import { AgentRunEvent, type AgentInput } from "./type";
 
 export async function chatWithWorkflow(
-  message: string,
   workflow: Workflow<null, AgentInput, ChatResponseChunk>,
+  messages: ChatMessage[],
 ): Promise<Response> {
-  const context = workflow.run({ message });
+  const context = workflow.run({ messages });
   const { stream, dataStream } = await createStreamFromWorkflowContext(context);
   const response = LlamaIndexAdapter.toDataStreamResponse(stream, {
     data: dataStream,
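
The stream.ts change flips the parameter order and passes the full conversation through: chatWithWorkflow(workflow, messages) now seeds workflow.run with { messages }, so workflow steps can read prior turns rather than a single pre-extracted message. A hedged call-site sketch (myWorkflow is a placeholder for any Workflow<null, AgentInput, ChatResponseChunk> instance built by the caller):

import type { ChatMessage } from "llamaindex";

// myWorkflow is hypothetical; any workflow matching the signature works here.
const history: ChatMessage[] = [
  { role: "user", content: "What does this workflow do?" },
  { role: "assistant", content: "It answers questions over your data." },
  { role: "user", content: "Stream the next answer, please." },
];
const response = await chatWithWorkflow(myWorkflow, history);
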
diff --git a/packages/server/src/workflow/type.ts b/packages/server/src/workflow/type.ts
index c6141539f2fed04c2d78d13a826871d949b4331d..3f08d1f314e21213c5bed836f0eeb3f4c790fdc0 100644
--- a/packages/server/src/workflow/type.ts
+++ b/packages/server/src/workflow/type.ts
@@ -1,13 +1,12 @@
 import {
   Workflow,
   WorkflowEvent,
+  type ChatMessage,
   type ChatResponseChunk,
-  type MessageContent,
 } from "llamaindex";
 
 export type AgentInput = {
-  message: MessageContent;
-  streaming?: boolean;
+  messages: ChatMessage[];
 };
 
 export type AgentRunEventType = "text" | "progress";
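
With AgentInput now carrying messages: ChatMessage[] (and the streaming flag dropped), a workflow that only needs the newest user turn derives it from the array itself, taking over the job of the removed getUserMessageContent helper. A minimal sketch, with import paths assumed:

import type { ChatMessage } from "llamaindex";
import type { AgentInput } from "./workflow/type"; // path is an assumption

// Recover the newest user message from the history.
// Array.prototype.findLast needs ES2023 (Node 18+).
function lastUserContent(input: AgentInput): ChatMessage["content"] | undefined {
  return input.messages.findLast((m) => m.role === "user")?.content;
}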