Commit c3d9b2f1 authored by thucpn

support agent workflow

parent 68377421
Chat handler:

-import { LlamaIndexAdapter } from "ai";
+import { type Message } from "ai";
 import { IncomingMessage, ServerResponse } from "http";
 import { type ChatMessage } from "llamaindex";
 import type { ServerWorkflow } from "../types";
@@ -7,7 +7,7 @@ import {
   pipeResponse,
   sendJSONResponse,
 } from "../utils/request";
-import { createStreamFromWorkflowContext } from "../utils/stream";
+import { runWorkflow } from "../utils/workflow";

 export const handleChat = async (
   workflow: ServerWorkflow,
@@ -16,7 +16,7 @@ export const handleChat = async (
 ) => {
   try {
     const body = await parseRequestBody(req);
-    const { messages } = body as { messages: ChatMessage[] };
+    const { messages } = body as { messages: Message[] };
     const lastMessage = messages[messages.length - 1];
     if (lastMessage?.role !== "user") {
@@ -25,15 +25,9 @@ export const handleChat = async (
       });
     }

-    const userMessage = lastMessage.content;
-    const chatHistory = messages.slice(0, -1);
-
-    const context = workflow.run({ userMessage, chatHistory });
-    const { stream, dataStream } =
-      await createStreamFromWorkflowContext(context);
-    const streamResponse = LlamaIndexAdapter.toDataStreamResponse(stream, {
-      data: dataStream,
-    });
+    const userInput = lastMessage.content;
+    const chatHistory = messages.slice(0, -1) as ChatMessage[];
+    const streamResponse = await runWorkflow(workflow, userInput, chatHistory);
     pipeResponse(res, streamResponse);
   } catch (error) {
     console.error("Chat error:", error);
...
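For orientation, here is a minimal usage sketch of the updated handler, not part of the commit. It assumes the elided parameters of handleChat are the Node req/res objects, as the IncomingMessage/ServerResponse imports suggest; the module paths and the myWorkflow export are hypothetical.

import { createServer } from "http";
import { handleChat } from "./handlers/chat"; // hypothetical path
import { myWorkflow } from "./workflows/example"; // hypothetical ServerWorkflow

createServer((req, res) => {
  if (req.method === "POST" && req.url === "/api/chat") {
    // handleChat parses the JSON body, rejects requests whose last message
    // is not from the user, and pipes the streamed workflow response back.
    void handleChat(myWorkflow, req, res);
  } else {
    res.statusCode = 404;
    res.end();
  }
}).listen(3000);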
Shared types:

 import {
+  AgentWorkflow,
   Workflow,
   type ChatMessage,
   type ChatResponseChunk,
-  type MessageContent,
 } from "llamaindex";

 export type AgentInput = {
-  userMessage: MessageContent;
-  chatHistory: ChatMessage[];
+  userInput: string; // the last message content from the user
+  chatHistory: ChatMessage[]; // the previous chat history (not including the last message)
 };

-export type ServerWorkflow = Workflow<null, AgentInput, ChatResponseChunk>;
+export type ServerWorkflow =
+  | Workflow<null, AgentInput, ChatResponseChunk>
+  | AgentWorkflow;
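After this change, both workflow shapes type-check as ServerWorkflow. A small sketch, not from the commit; the "./types" import path is assumed:

import { AgentWorkflow, Workflow, type ChatResponseChunk } from "llamaindex";
import type { AgentInput, ServerWorkflow } from "./types"; // hypothetical path

// Original shape: a hand-rolled event-driven workflow.
const custom: ServerWorkflow = new Workflow<null, AgentInput, ChatResponseChunk>();

// New shape: a prebuilt agent workflow. How the agent is constructed is out of
// scope for this commit, so it is only declared here.
declare const agent: AgentWorkflow;
const prebuilt: ServerWorkflow = agent;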
Workflow streaming utilities (previously imported as ../utils/stream, now as ../utils/workflow):

-import { StreamData, type JSONValue } from "ai";
+import { LlamaIndexAdapter, StreamData, type JSONValue } from "ai";
 import {
+  AgentWorkflow,
   EngineResponse,
   StopEvent,
   WorkflowContext,
   WorkflowEvent,
+  type ChatMessage,
   type ChatResponseChunk,
 } from "llamaindex";
 import { ReadableStream } from "stream/web";
+import type { ServerWorkflow } from "../types";

-export async function createStreamFromWorkflowContext<Input, Output, Context>(
+export async function runWorkflow(
+  workflow: ServerWorkflow,
+  userInput: string,
+  chatHistory: ChatMessage[],
+) {
+  if (workflow instanceof AgentWorkflow) {
+    const context = workflow.run(userInput, { chatHistory });
+    const { stream, dataStream } = await createStreamFromWorkflowContext(
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      context as any,
+    );
+    return LlamaIndexAdapter.toDataStreamResponse(stream, { data: dataStream });
+  }
+
+  const context = workflow.run({ userInput, chatHistory });
+  const { stream, dataStream } = await createStreamFromWorkflowContext(context);
+  return LlamaIndexAdapter.toDataStreamResponse(stream, { data: dataStream });
+}
+
+async function createStreamFromWorkflowContext<Input, Output, Context>(
   context: WorkflowContext<Input, Output, Context>,
 ): Promise<{ stream: ReadableStream<EngineResponse>; dataStream: StreamData }> {
   const dataStream = new StreamData();
...
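The instanceof check is what pays for the union type above: AgentWorkflow.run takes the user input positionally with the chat history in an options object, while the custom Workflow.run takes a single AgentInput payload; both branches converge on the same LlamaIndexAdapter.toDataStreamResponse call. A sketch of calling runWorkflow directly, with import paths assumed to match those used in the handler:

import type { ChatMessage } from "llamaindex";
import type { ServerWorkflow } from "../types"; // hypothetical path
import { runWorkflow } from "../utils/workflow"; // hypothetical path

async function demo(workflow: ServerWorkflow) {
  const history: ChatMessage[] = [
    { role: "user", content: "Hello" },
    { role: "assistant", content: "Hi! How can I help?" },
  ];
  // Returns the Response produced by LlamaIndexAdapter.toDataStreamResponse,
  // ready to pipe to the client regardless of which workflow kind ran.
  return runWorkflow(workflow, "Summarize our conversation so far", history);
}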