From f331c207fc37ab39be30ee554b5a043482aa3e98 Mon Sep 17 00:00:00 2001
From: Marcus Schiesser <mail@marcusschiesser.de>
Date: Fri, 27 Oct 2023 17:49:21 +0700
Subject: [PATCH] added streaming for llamaindex

---
 .../nextjs/app/api/chat/llamaindex-stream.ts  | 35 ++++++++++++++
 .../streaming/nextjs/app/api/chat/route.ts    | 46 +++++++------------
 templates/streaming/nextjs/package.json       |  1 -
 3 files changed, 52 insertions(+), 30 deletions(-)
 create mode 100644 templates/streaming/nextjs/app/api/chat/llamaindex-stream.ts

diff --git a/templates/streaming/nextjs/app/api/chat/llamaindex-stream.ts b/templates/streaming/nextjs/app/api/chat/llamaindex-stream.ts
new file mode 100644
index 00000000..12328de8
--- /dev/null
+++ b/templates/streaming/nextjs/app/api/chat/llamaindex-stream.ts
@@ -0,0 +1,35 @@
+import {
+  createCallbacksTransformer,
+  createStreamDataTransformer,
+  trimStartOfStreamHelper,
+  type AIStreamCallbacksAndOptions,
+} from "ai";
+
+function createParser(res: AsyncGenerator<any>) {
+  const trimStartOfStream = trimStartOfStreamHelper();
+  return new ReadableStream<string>({
+    async pull(controller): Promise<void> {
+      const { value, done } = await res.next();
+      if (done) {
+        controller.close();
+        return;
+      }
+
+      const text = trimStartOfStream(value ?? "");
+      if (text) {
+        controller.enqueue(text);
+      }
+    },
+  });
+}
+
+export function LlamaIndexStream(
+  res: AsyncGenerator<any>,
+  callbacks?: AIStreamCallbacksAndOptions,
+): ReadableStream {
+  return createParser(res)
+    .pipeThrough(createCallbacksTransformer(callbacks))
+    .pipeThrough(
+      createStreamDataTransformer(callbacks?.experimental_streamData),
+    );
+}
diff --git a/templates/streaming/nextjs/app/api/chat/route.ts b/templates/streaming/nextjs/app/api/chat/route.ts
index 06432075..fb54dbc8 100644
--- a/templates/streaming/nextjs/app/api/chat/route.ts
+++ b/templates/streaming/nextjs/app/api/chat/route.ts
@@ -1,50 +1,38 @@
-import { OpenAIStream, StreamingTextResponse } from "ai";
+import { Message, StreamingTextResponse } from "ai";
+import { OpenAI, SimpleChatEngine } from "llamaindex";
 import { NextRequest, NextResponse } from "next/server";
-import OpenAI from "openai";
+import { LlamaIndexStream } from "./llamaindex-stream";
+
 export const runtime = "nodejs";
 export const dynamic = "force-dynamic";
 
-const openai = new OpenAI({
-  apiKey: process.env.OPENAI_API_KEY,
-});
-
 export async function POST(request: NextRequest) {
   try {
     const body = await request.json();
-    const { messages } = body;
-    if (!messages) {
+    const { messages }: { messages: Message[] } = body;
+    const lastMessage = messages?.pop();
+    if (!messages || !lastMessage || lastMessage.role !== "user") {
       return NextResponse.json(
         {
-          error: "messages are required in the request body",
+          error:
+            "messages are required in the request body and the last message must be from the user",
         },
         { status: 400 },
       );
     }
 
-    // const llm = new OpenAI({
-    //   model: "gpt-3.5-turbo",
-    // });
-
-    // const chatEngine = new SimpleChatEngine({
-    //   llm,
-    // });
-
-    // const response = await chatEngine.chat(message, chatHistory);
-    // const result: ChatMessage = {
-    //   role: "assistant",
-    //   content: response.response,
-    // };
-
-    // return NextResponse.json({ result });
+    const llm = new OpenAI({
+      model: "gpt-3.5-turbo",
+    });
 
-    const response = await openai.chat.completions.create({
-      model: "gpt-4",
-      stream: true,
-      messages,
+    const chatEngine = new SimpleChatEngine({
+      llm,
     });
 
+    const response = await chatEngine.chat(lastMessage.content, messages, true);
+
     // Transform the response into a readable stream
-    const stream = OpenAIStream(response);
+    const stream = LlamaIndexStream(response);
 
     // Return a StreamingTextResponse, which can be consumed by the client
     return new StreamingTextResponse(stream);
diff --git a/templates/streaming/nextjs/package.json b/templates/streaming/nextjs/package.json
index 399c20bc..e9f23201 100644
--- a/templates/streaming/nextjs/package.json
+++ b/templates/streaming/nextjs/package.json
@@ -11,7 +11,6 @@
     "ai": "^2",
     "llamaindex": "0.0.31",
     "next": "^13",
-    "openai": "^4.14.0",
     "react": "^18",
     "react-dom": "^18"
   },
-- 
GitLab