From c6bad7d951b56f0e9ba6a4bae05e0f0abe571509 Mon Sep 17 00:00:00 2001
From: Thuc Pham <51660321+thucpn@users.noreply.github.com>
Date: Fri, 15 Nov 2024 12:33:30 +0700
Subject: [PATCH] docs(next): chat UI with rsc example (#1481)

Co-authored-by: Marcus Schiesser <mail@marcusschiesser.de>
---
 apps/next/package.json                        |  2 +-
 apps/next/src/app/api/chat/route.ts           | 29 +++----
 apps/next/src/components/demo/chat.tsx        |  8 --
 .../src/components/demo/chat/api/demo.tsx     | 16 ++++
 .../components/demo/chat/rsc/ai-action.tsx    | 55 ++++++++++++++
 .../components/demo/chat/rsc/chat-section.tsx | 33 ++++++++
 .../src/components/demo/chat/rsc/demo.tsx     |  8 ++
 .../components/demo/chat/rsc/use-chat-rsc.tsx | 41 ++++++++++
 .../docs/llamaindex/guide/{ => chat}/chat.mdx |  9 ++-
 .../docs/llamaindex/guide/chat/meta.json      |  6 ++
 .../docs/llamaindex/guide/chat/rsc.mdx        | 68 +++++++++++++++++
 pnpm-lock.yaml                                | 76 ++++++++++++++-----
 12 files changed, 306 insertions(+), 45 deletions(-)
 delete mode 100644 apps/next/src/components/demo/chat.tsx
 create mode 100644 apps/next/src/components/demo/chat/api/demo.tsx
 create mode 100644 apps/next/src/components/demo/chat/rsc/ai-action.tsx
 create mode 100644 apps/next/src/components/demo/chat/rsc/chat-section.tsx
 create mode 100644 apps/next/src/components/demo/chat/rsc/demo.tsx
 create mode 100644 apps/next/src/components/demo/chat/rsc/use-chat-rsc.tsx
 rename apps/next/src/content/docs/llamaindex/guide/{ => chat}/chat.mdx (85%)
 create mode 100644 apps/next/src/content/docs/llamaindex/guide/chat/meta.json
 create mode 100644 apps/next/src/content/docs/llamaindex/guide/chat/rsc.mdx

diff --git a/apps/next/package.json b/apps/next/package.json
index 04d344487..8779eca9f 100644
--- a/apps/next/package.json
+++ b/apps/next/package.json
@@ -12,7 +12,7 @@
   },
   "dependencies": {
     "@icons-pack/react-simple-icons": "^10.1.0",
-    "@llamaindex/chat-ui": "0.0.5",
+    "@llamaindex/chat-ui": "0.0.8",
     "@llamaindex/cloud": "workspace:*",
     "@llamaindex/core": "workspace:*",
     "@llamaindex/node-parser": "workspace:*",
diff --git a/apps/next/src/app/api/chat/route.ts b/apps/next/src/app/api/chat/route.ts
index 94a65fc53..5852ff295 100644
--- a/apps/next/src/app/api/chat/route.ts
+++ b/apps/next/src/app/api/chat/route.ts
@@ -1,6 +1,6 @@
-import { LlamaIndexAdapter, type Message } from "ai";
-import { SimpleChatEngine, type ChatMessage } from "llamaindex";
-import { NextResponse, type NextRequest } from "next/server";
+import { Message } from "ai";
+import { simulateReadableStream } from "ai/test";
+import { NextRequest, NextResponse } from "next/server";
 
 export async function POST(request: NextRequest) {
   try {
@@ -12,16 +12,19 @@ export async function POST(request: NextRequest) {
         { status: 400 },
       );
     }
-
-    const chatEngine = new SimpleChatEngine();
-
-    return LlamaIndexAdapter.toDataStreamResponse(
-      await chatEngine.chat({
-        message: userMessage.content,
-        chatHistory: messages as ChatMessage[],
-        stream: true,
-      }),
-      {},
+    const mockResponse = `Hello! This is a mock response to: ${userMessage.content}`;
+    return new Response(
+      simulateReadableStream({
+        chunkDelayInMs: 20,
+        values: mockResponse.split(" ").map((t) => `0:"${t} "\n`),
+      }).pipeThrough(new TextEncoderStream()),
+      {
+        status: 200,
+        headers: {
+          "X-Vercel-AI-Data-Stream": "v1",
+          "Content-Type": "text/plain; charset=utf-8",
+        },
+      },
     );
   } catch (error) {
     const detail = (error as Error).message;
diff --git a/apps/next/src/components/demo/chat.tsx b/apps/next/src/components/demo/chat.tsx
deleted file mode 100644
index b816ffd6e..000000000
--- a/apps/next/src/components/demo/chat.tsx
+++ /dev/null
@@ -1,8 +0,0 @@
-"use client";
-import { ChatSection } from "@llamaindex/chat-ui";
-import { useChat } from "ai/react";
-
-export const ChatDemo = () => {
-  const handler = useChat();
-  return <ChatSection handler={handler} />;
-};
diff --git a/apps/next/src/components/demo/chat/api/demo.tsx b/apps/next/src/components/demo/chat/api/demo.tsx
new file mode 100644
index 000000000..85bb750e1
--- /dev/null
+++ b/apps/next/src/components/demo/chat/api/demo.tsx
@@ -0,0 +1,16 @@
+"use client";
+import { ChatInput, ChatMessages, ChatSection } from "@llamaindex/chat-ui";
+import { useChat } from "ai/react";
+
+export const ChatDemo = () => {
+  const handler = useChat();
+  return (
+    <ChatSection handler={handler}>
+      <ChatMessages>
+        <ChatMessages.List className="h-auto max-h-[400px]" />
+        <ChatMessages.Actions />
+      </ChatMessages>
+      <ChatInput />
+    </ChatSection>
+  );
+};
diff --git a/apps/next/src/components/demo/chat/rsc/ai-action.tsx b/apps/next/src/components/demo/chat/rsc/ai-action.tsx
new file mode 100644
index 000000000..f8d944ad1
--- /dev/null
+++ b/apps/next/src/components/demo/chat/rsc/ai-action.tsx
@@ -0,0 +1,55 @@
+import { Markdown } from "@llamaindex/chat-ui/widgets";
+import { generateId, Message, parseStreamPart } from "ai";
+import { createAI, createStreamableUI, getMutableAIState } from "ai/rsc";
+import { simulateReadableStream } from "ai/test";
+import { ReactNode } from "react";
+
+type ServerState = Message[];
+type FrontendState = Array<Message & { display: ReactNode }>;
+type Actions = {
+  chat: (message: Message) => Promise<Message & { display: ReactNode }>;
+};
+
+export const AI = createAI<ServerState, FrontendState, Actions>({
+  initialAIState: [],
+  initialUIState: [],
+  actions: {
+    chat: async (message: Message) => {
+      "use server";
+
+      const aiState = getMutableAIState<typeof AI>();
+      aiState.update((prev) => [...prev, message]);
+
+      const mockResponse = `Hello! This is a mock response to: ${message.content}`;
+      const responseStream = simulateReadableStream({
+        chunkDelayInMs: 20,
+        values: mockResponse.split(" ").map((t) => `0:"${t} "\n`),
+      });
+
+      const uiStream = createStreamableUI();
+      const assistantMessage: Message = {
+        id: generateId(),
+        role: "assistant",
+        content: "",
+      };
+
+      responseStream.pipeTo(
+        new WritableStream({
+          write: async (message) => {
+            assistantMessage.content += parseStreamPart(message).value;
+            uiStream.update(<Markdown content={assistantMessage.content} />);
+          },
+          close: () => {
+            aiState.done([...aiState.get(), assistantMessage]);
+            uiStream.done();
+          },
+        }),
+      );
+
+      return {
+        ...assistantMessage,
+        display: uiStream.value,
+      };
+    },
+  },
+});
diff --git a/apps/next/src/components/demo/chat/rsc/chat-section.tsx b/apps/next/src/components/demo/chat/rsc/chat-section.tsx
new file mode 100644
index 000000000..384cdae3b
--- /dev/null
+++ b/apps/next/src/components/demo/chat/rsc/chat-section.tsx
@@ -0,0 +1,33 @@
+"use client";
+
+import {
+  ChatInput,
+  ChatMessage,
+  ChatMessages,
+  ChatSection as ChatSectionUI,
+} from "@llamaindex/chat-ui";
+import { useChatRSC } from "./use-chat-rsc";
+
+export const ChatSectionRSC = () => {
+  const handler = useChatRSC();
+  return (
+    <ChatSectionUI handler={handler}>
+      <ChatMessages>
+        <ChatMessages.List className="h-auto max-h-[400px]">
+          {handler.messages.map((message, index) => (
+            <ChatMessage
+              key={index}
+              message={message}
+              isLast={index === handler.messages.length - 1}
+            >
+              <ChatMessage.Avatar />
+              <ChatMessage.Content>{message.display}</ChatMessage.Content>
+            </ChatMessage>
+          ))}
+          <ChatMessages.Loading />
+        </ChatMessages.List>
+      </ChatMessages>
+      <ChatInput />
+    </ChatSectionUI>
+  );
+};
diff --git a/apps/next/src/components/demo/chat/rsc/demo.tsx b/apps/next/src/components/demo/chat/rsc/demo.tsx
new file mode 100644
index 000000000..76a2cfaa8
--- /dev/null
+++ b/apps/next/src/components/demo/chat/rsc/demo.tsx
@@ -0,0 +1,8 @@
+import { AI } from "./ai-action";
+import { ChatSectionRSC } from "./chat-section";
+
+export const ChatDemoRSC = () => (
+  <AI>
+    <ChatSectionRSC />
+  </AI>
+);
diff --git a/apps/next/src/components/demo/chat/rsc/use-chat-rsc.tsx b/apps/next/src/components/demo/chat/rsc/use-chat-rsc.tsx
new file mode 100644
index 000000000..f7d1f5e77
--- /dev/null
+++ b/apps/next/src/components/demo/chat/rsc/use-chat-rsc.tsx
@@ -0,0 +1,41 @@
+"use client";
+
+import { useActions } from "ai/rsc";
+
+import { generateId, Message } from "ai";
+import { useUIState } from "ai/rsc";
+import { useState } from "react";
+import { AI } from "./ai-action";
+
+export function useChatRSC() {
+  const [input, setInput] = useState<string>("");
+  const [isLoading, setIsLoading] = useState<boolean>(false);
+  const [messages, setMessages] = useUIState<typeof AI>();
+  const { chat } = useActions<typeof AI>();
+
+  const append = async (message: Omit<Message, "id">) => {
+    const newMsg: Message = { ...message, id: generateId() };
+
+    setIsLoading(true);
+    try {
+      setMessages((prev) => [...prev, { ...newMsg, display: message.content }]);
+      const assistantMsg = await chat(newMsg);
+      setMessages((prev) => [...prev, assistantMsg]);
+    } catch (error) {
+      console.error(error);
+    }
+    setIsLoading(false);
+    setInput("");
+
+    return message.content;
+  };
+
+  return {
+    input,
+    setInput,
+    isLoading,
+    messages,
+    setMessages,
+    append,
+  };
+}
diff --git a/apps/next/src/content/docs/llamaindex/guide/chat.mdx b/apps/next/src/content/docs/llamaindex/guide/chat/chat.mdx
similarity index 85%
rename from apps/next/src/content/docs/llamaindex/guide/chat.mdx
rename to apps/next/src/content/docs/llamaindex/guide/chat/chat.mdx
index 8a5ef245e..4cbaea157 100644
--- a/apps/next/src/content/docs/llamaindex/guide/chat.mdx
+++ b/apps/next/src/content/docs/llamaindex/guide/chat/chat.mdx
@@ -1,8 +1,8 @@
 ---
-title: Chat-UI 
-description: Use chat-ui to add a chat interface to your LlamaIndexTS application.
+title: Using API Route
+description: Chat interface for your LlamaIndexTS application using API Route
 ---
-import { ChatDemo } from '../../../../components/demo/chat';
+import { ChatDemo } from '../../../../../components/demo/chat/api/demo';
 import "@llamaindex/chat-ui/styles/code.css";
 import "@llamaindex/chat-ui/styles/katex.css";
 
@@ -26,7 +26,7 @@ This is the simplest way to add a chat interface to your application. Copy the f
 
 ```json doc-gen:file
 {
-  "file": "./src/components/demo/chat.tsx",
+  "file": "./src/components/demo/chat/api/demo.tsx",
 	"codeblock": true
 }
 ```
@@ -37,6 +37,7 @@ Combining both, you're getting a fully functional chat interface:
 
 <ChatDemo />
 
+
 ## Next Steps
 
 The steps above are the bare minimum to get a chat interface working. From here, you can go two ways:
diff --git a/apps/next/src/content/docs/llamaindex/guide/chat/meta.json b/apps/next/src/content/docs/llamaindex/guide/chat/meta.json
new file mode 100644
index 000000000..7550c3cc9
--- /dev/null
+++ b/apps/next/src/content/docs/llamaindex/guide/chat/meta.json
@@ -0,0 +1,6 @@
+{
+  "title": "Chat-UI",
+  "description": "Use chat-ui to add a chat interface to your LlamaIndexTS application.",
+  "defaultOpen": false,
+  "pages": ["chat", "rsc"]
+}
diff --git a/apps/next/src/content/docs/llamaindex/guide/chat/rsc.mdx b/apps/next/src/content/docs/llamaindex/guide/chat/rsc.mdx
new file mode 100644
index 000000000..d0a8ebc26
--- /dev/null
+++ b/apps/next/src/content/docs/llamaindex/guide/chat/rsc.mdx
@@ -0,0 +1,68 @@
+---
+title: Using Next.js RSC
+description: Chat interface for your LlamaIndexTS application using Next.js RSC
+---
+import { ChatDemoRSC } from '../../../../../components/demo/chat/rsc/demo';
+import "@llamaindex/chat-ui/styles/code.css";
+import "@llamaindex/chat-ui/styles/katex.css";
+
+Using [chat-ui](https://github.com/run-llama/chat-ui), it's easy to add a chat interface to your LlamaIndexTS application using [Next.js RSC](https://nextjs.org/docs/app/building-your-application/rendering/server-components) and [Vercel AI RSC](https://sdk.vercel.ai/docs/ai-sdk-rsc/overview).
+
+With RSC, the chat messages are not returned as JSON from the server (like when using an [API route](./chat)), instead the chat message components are rendered on the server side.
+This is, for example, useful for rendering a whole chat history on the server before sending it to the client. [Check here](https://sdk.vercel.ai/docs/getting-started/navigating-the-library#when-to-use-ai-sdk-rsc) for a discussion of when to use RSC.
+
+For implementing a chat interface with RSC, you need to create an AI action and then connect the chat interface to use it.
+
+## Create an AI action
+
+First, define an [AI context provider](https://sdk.vercel.ai/examples/rsc/state-management/ai-ui-states) with a chat server action:
+
+```json doc-gen:file
+{
+  "file": "./src/components/demo/chat/rsc/ai-action.tsx",
+	"codeblock": true
+}
+```
+
+The chat server action is using LlamaIndexTS to generate a response based on the chat history and the user input.
+
+## Create the chat UI
+
+The entrypoint of our application initializes the AI provider for the application and adds a `ChatSection` component:
+
+```json doc-gen:file
+{
+  "file": "./src/components/demo/chat/rsc/demo.tsx",
+	"codeblock": true
+}
+```
+
+The `ChatSection` component is created by using chat components from @llamaindex/chat-ui:
+
+```json doc-gen:file
+{
+  "file": "./src/components/demo/chat/rsc/chat-section.tsx",
+	"codeblock": true
+}
+```
+
+It is using a `useChatRSC` hook to connect the chat interface to the `chat` AI action that we defined earlier:
+
+```json doc-gen:file
+{
+  "file": "./src/components/demo/chat/rsc/use-chat-rsc.tsx",
+	"codeblock": true
+}
+```
+
+## Try RSC Chat ⬇️
+
+<ChatDemoRSC />
+
+## Next Steps
+
+The steps above are the bare minimum to get a chat interface working with RSC. From here, you can go two ways:
+
+1. Use our [full-stack RSC example](https://github.com/run-llama/nextjs-rsc) based on [create-llama](https://github.com/run-llama/create-llama) to get started quickly with a fully working chat interface or
+2. Learn more about [AI RSC](https://sdk.vercel.ai/examples/rsc), [chat-ui](https://github.com/run-llama/chat-ui) and [LlamaIndexTS](https://github.com/run-llama/llamaindex-ts) to customize the chat interface and AI actions to your needs.
+
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 76f855a23..e7d2f0171 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -127,8 +127,8 @@ importers:
         specifier: ^10.1.0
         version: 10.1.0(react@18.3.1)
       '@llamaindex/chat-ui':
-        specifier: 0.0.5
-        version: 0.0.5(@types/react-dom@18.3.1)(@types/react@18.3.12)(encoding@0.1.13)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+        specifier: 0.0.8
+        version: 0.0.8(@types/react-dom@18.3.1)(@types/react@18.3.12)(encoding@0.1.13)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
       '@llamaindex/cloud':
         specifier: workspace:*
         version: link:../../packages/cloud
@@ -173,10 +173,10 @@ importers:
         version: 1.1.3(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
       '@vercel/functions':
         specifier: ^1.5.0
-        version: 1.5.0(@aws-sdk/credential-provider-web-identity@3.679.0(@aws-sdk/client-sts@3.682.0))
+        version: 1.5.0(@aws-sdk/credential-provider-web-identity@3.679.0)
       ai:
         specifier: ^3.4.31
-        version: 3.4.31(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@5.1.9))(svelte@5.1.9)(vue@3.5.12(typescript@5.6.3))(zod@3.23.8)
+        version: 3.4.31(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))(react@18.3.1)(sswr@2.1.0)(vue@3.5.12(typescript@5.6.3))(zod@3.23.8)
       class-variance-authority:
         specifier: ^0.7.0
         version: 0.7.0
@@ -257,7 +257,7 @@ importers:
         version: 1.22.2
       shiki-magic-move:
         specifier: ^0.5.0
-        version: 0.5.0(react@18.3.1)(shiki@1.22.2)(svelte@5.1.9)(vue@3.5.12(typescript@5.6.3))
+        version: 0.5.0(react@18.3.1)(shiki@1.22.2)(vue@3.5.12(typescript@5.6.3))
       swr:
         specifier: ^2.2.5
         version: 2.2.5(react@18.3.1)
@@ -428,7 +428,7 @@ importers:
     dependencies:
       ai:
         specifier: ^3.3.21
-        version: 3.4.31(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@5.1.9))(svelte@5.1.9)(vue@3.5.12(typescript@5.6.3))(zod@3.23.8)
+        version: 3.4.31(openai@4.69.0(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@5.1.9))(svelte@5.1.9)(vue@3.5.12(typescript@5.6.3))(zod@3.23.8)
       llamaindex:
         specifier: workspace:*
         version: link:../../../packages/llamaindex
@@ -606,7 +606,7 @@ importers:
         version: 2.4.9
       chromadb:
         specifier: ^1.8.1
-        version: 1.9.2(cohere-ai@7.14.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))
+        version: 1.9.2(cohere-ai@7.14.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.69.0(encoding@0.1.13))
       commander:
         specifier: ^12.1.0
         version: 12.1.0
@@ -738,7 +738,7 @@ importers:
         version: 1.1.0(@types/react@18.3.12)(react@18.3.1)
       ai:
         specifier: ^3.3.21
-        version: 3.4.31(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@5.1.9))(svelte@5.1.9)(vue@3.5.12(typescript@5.6.3))(zod@3.23.8)
+        version: 3.4.31(openai@4.69.0(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@5.1.9))(svelte@5.1.9)(vue@3.5.12(typescript@5.6.3))(zod@3.23.8)
       class-variance-authority:
         specifier: ^0.7.0
         version: 0.7.0
@@ -3755,8 +3755,8 @@ packages:
   '@leichtgewicht/ip-codec@2.0.5':
     resolution: {integrity: sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==}
 
-  '@llamaindex/chat-ui@0.0.5':
-    resolution: {integrity: sha512-nGlhtAeIlVBtHAsCtBN5f+/iJpIHQ7uFf43ebye/wATf5tT12HUl3tP5O/+XC7nyXXwByjk5KAEN+hk3aC1Cug==}
+  '@llamaindex/chat-ui@0.0.8':
+    resolution: {integrity: sha512-yTWgxDzJD6F8w17CNIIJkSBE+hyd+WRqhF6lSElqo3k7CQyhtYr8cOD7cS6K8gBqaI3Xha5EPqvqV0+enJA0Mg==}
     peerDependencies:
       react: ^18.2.0
 
@@ -16534,7 +16534,7 @@ snapshots:
 
   '@leichtgewicht/ip-codec@2.0.5': {}
 
-  '@llamaindex/chat-ui@0.0.5(@types/react-dom@18.3.1)(@types/react@18.3.12)(encoding@0.1.13)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
+  '@llamaindex/chat-ui@0.0.8(@types/react-dom@18.3.1)(@types/react@18.3.12)(encoding@0.1.13)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
     dependencies:
       '@llamaindex/pdf-viewer': 1.2.0(@types/react@18.3.12)(encoding@0.1.13)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
       '@radix-ui/react-collapsible': 1.1.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -18616,7 +18616,7 @@ snapshots:
 
   '@upstash/vector@1.1.7': {}
 
-  '@vercel/functions@1.5.0(@aws-sdk/credential-provider-web-identity@3.679.0(@aws-sdk/client-sts@3.682.0))':
+  '@vercel/functions@1.5.0(@aws-sdk/credential-provider-web-identity@3.679.0)':
     optionalDependencies:
       '@aws-sdk/credential-provider-web-identity': 3.679.0(@aws-sdk/client-sts@3.682.0)
 
@@ -18894,7 +18894,7 @@ snapshots:
       clean-stack: 2.2.0
       indent-string: 4.0.0
 
-  ai@3.4.31(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@5.1.9))(svelte@5.1.9)(vue@3.5.12(typescript@5.6.3))(zod@3.23.8):
+  ai@3.4.31(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))(react@18.3.1)(sswr@2.1.0)(vue@3.5.12(typescript@5.6.3))(zod@3.23.8):
     dependencies:
       '@ai-sdk/provider': 0.0.26
       '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8)
@@ -18913,6 +18913,30 @@ snapshots:
       openai: 4.69.0(encoding@0.1.13)(zod@3.23.8)
       react: 18.3.1
       sswr: 2.1.0(svelte@5.1.9)
+      zod: 3.23.8
+    transitivePeerDependencies:
+      - solid-js
+      - vue
+
+  ai@3.4.31(openai@4.69.0(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@5.1.9))(svelte@5.1.9)(vue@3.5.12(typescript@5.6.3))(zod@3.23.8):
+    dependencies:
+      '@ai-sdk/provider': 0.0.26
+      '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8)
+      '@ai-sdk/react': 0.0.70(react@18.3.1)(zod@3.23.8)
+      '@ai-sdk/solid': 0.0.54(zod@3.23.8)
+      '@ai-sdk/svelte': 0.0.57(svelte@5.1.9)(zod@3.23.8)
+      '@ai-sdk/ui-utils': 0.0.50(zod@3.23.8)
+      '@ai-sdk/vue': 0.0.59(vue@3.5.12(typescript@5.6.3))(zod@3.23.8)
+      '@opentelemetry/api': 1.9.0
+      eventsource-parser: 1.1.2
+      json-schema: 0.4.0
+      jsondiffpatch: 0.6.0
+      secure-json-parse: 2.7.0
+      zod-to-json-schema: 3.23.5(zod@3.23.8)
+    optionalDependencies:
+      openai: 4.69.0(zod@3.23.8)
+      react: 18.3.1
+      sswr: 2.1.0(svelte@5.1.9)
       svelte: 5.1.9
       zod: 3.23.8
     transitivePeerDependencies:
@@ -19600,7 +19624,7 @@ snapshots:
     transitivePeerDependencies:
       - encoding
 
-  chromadb@1.9.2(cohere-ai@7.14.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)):
+  chromadb@1.9.2(cohere-ai@7.14.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.69.0(encoding@0.1.13)):
     dependencies:
       cliui: 8.0.1
       isomorphic-fetch: 3.0.0(encoding@0.1.13)
@@ -20687,7 +20711,7 @@ snapshots:
       debug: 4.3.7
       enhanced-resolve: 5.17.1
       eslint: 9.14.0(jiti@2.4.0)
-      eslint-module-utils: 2.12.0(@typescript-eslint/parser@8.13.0(eslint@9.14.0(jiti@2.4.0))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3)(eslint@9.14.0(jiti@2.4.0))
+      eslint-module-utils: 2.12.0(@typescript-eslint/parser@8.13.0(eslint@9.14.0(jiti@2.4.0))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@8.13.0(eslint@9.14.0(jiti@2.4.0))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.31.0)(eslint@9.14.0(jiti@2.4.0)))(eslint@9.14.0(jiti@2.4.0))
       fast-glob: 3.3.2
       get-tsconfig: 4.8.1
       is-bun-module: 1.1.0
@@ -20700,7 +20724,7 @@ snapshots:
       - eslint-import-resolver-webpack
       - supports-color
 
-  eslint-module-utils@2.12.0(@typescript-eslint/parser@8.13.0(eslint@9.14.0(jiti@2.4.0))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3)(eslint@9.14.0(jiti@2.4.0)):
+  eslint-module-utils@2.12.0(@typescript-eslint/parser@8.13.0(eslint@9.14.0(jiti@2.4.0))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@8.13.0(eslint@9.14.0(jiti@2.4.0))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.31.0)(eslint@9.14.0(jiti@2.4.0)))(eslint@9.14.0(jiti@2.4.0)):
     dependencies:
       debug: 3.2.7
     optionalDependencies:
@@ -20722,7 +20746,7 @@ snapshots:
       doctrine: 2.1.0
       eslint: 9.14.0(jiti@2.4.0)
       eslint-import-resolver-node: 0.3.9
-      eslint-module-utils: 2.12.0(@typescript-eslint/parser@8.13.0(eslint@9.14.0(jiti@2.4.0))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3)(eslint@9.14.0(jiti@2.4.0))
+      eslint-module-utils: 2.12.0(@typescript-eslint/parser@8.13.0(eslint@9.14.0(jiti@2.4.0))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@8.13.0(eslint@9.14.0(jiti@2.4.0))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.31.0)(eslint@9.14.0(jiti@2.4.0)))(eslint@9.14.0(jiti@2.4.0))
       hasown: 2.0.2
       is-core-module: 2.15.1
       is-glob: 4.0.3
@@ -24388,6 +24412,21 @@ snapshots:
     transitivePeerDependencies:
       - encoding
 
+  openai@4.69.0(zod@3.23.8):
+    dependencies:
+      '@types/node': 18.19.64
+      '@types/node-fetch': 2.6.11
+      abort-controller: 3.0.0
+      agentkeepalive: 4.5.0
+      form-data-encoder: 1.7.2
+      formdata-node: 4.4.1
+      node-fetch: 2.7.0(encoding@0.1.13)
+    optionalDependencies:
+      zod: 3.23.8
+    transitivePeerDependencies:
+      - encoding
+    optional: true
+
   openapi-sampler@1.5.1:
     dependencies:
       '@types/json-schema': 7.0.15
@@ -26259,14 +26298,13 @@ snapshots:
       interpret: 1.4.0
       rechoir: 0.6.2
 
-  shiki-magic-move@0.5.0(react@18.3.1)(shiki@1.22.2)(svelte@5.1.9)(vue@3.5.12(typescript@5.6.3)):
+  shiki-magic-move@0.5.0(react@18.3.1)(shiki@1.22.2)(vue@3.5.12(typescript@5.6.3)):
     dependencies:
       diff-match-patch-es: 0.1.1
       ohash: 1.1.4
     optionalDependencies:
       react: 18.3.1
       shiki: 1.22.2
-      svelte: 5.1.9
       vue: 3.5.12(typescript@5.6.3)
 
   shiki@1.22.2:
-- 
GitLab