From 8e124e5b6379ee8c7553ac863475d340b786b55f Mon Sep 17 00:00:00 2001
From: Thuc Pham <51660321+thucpn@users.noreply.github.com>
Date: Mon, 15 Jan 2024 17:57:20 +0700
Subject: [PATCH] feat: support showing image for chat message in NextJS
 (#368)

---
 .changeset/spicy-colts-dream.md               |  5 ++
 packages/create-llama/helpers/index.ts        |  4 ++
 .../nextjs/app/api/chat/llamaindex-stream.ts  | 47 +++++++++++++++----
 .../streaming/nextjs/app/api/chat/route.ts    | 14 ++++--
 .../nextjs/app/components/chat-section.tsx    |  9 +++-
 .../nextjs/app/components/transform.ts        | 19 ++++++++
 .../app/components/ui/chat/chat-message.tsx   | 35 +++++++++++++-
 .../app/components/ui/chat/chat.interface.ts  |  6 +--
 .../nextjs/app/components/ui/chat/index.ts    |  2 +-
 9 files changed, 119 insertions(+), 22 deletions(-)
 create mode 100644 .changeset/spicy-colts-dream.md
 create mode 100644 packages/create-llama/templates/types/streaming/nextjs/app/components/transform.ts

diff --git a/.changeset/spicy-colts-dream.md b/.changeset/spicy-colts-dream.md
new file mode 100644
index 000000000..392dca0f4
--- /dev/null
+++ b/.changeset/spicy-colts-dream.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+feat: support showing image on chat message
diff --git a/packages/create-llama/helpers/index.ts b/packages/create-llama/helpers/index.ts
index e4f3ddfa0..3f8dad54b 100644
--- a/packages/create-llama/helpers/index.ts
+++ b/packages/create-llama/helpers/index.ts
@@ -162,6 +162,10 @@ export const installTemplate = async (
       props.openAiKey,
       props.vectorDb,
     );
+  } else {
+    // this is a frontend for a full-stack app, create .env file with model information
+    const content = `MODEL=${props.model}\nNEXT_PUBLIC_MODEL=${props.model}\n`;
+    await fs.writeFile(path.join(props.root, ".env"), content);
   }
 };

diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts
index 12328de87..5ac376d63 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts
@@ -1,17 +1,43 @@
 import {
+  JSONValue,
   createCallbacksTransformer,
   createStreamDataTransformer,
+  experimental_StreamData,
   trimStartOfStreamHelper,
   type AIStreamCallbacksAndOptions,
 } from "ai";

-function createParser(res: AsyncGenerator<any>) {
+type ParserOptions = {
+  image_url?: string;
+};
+
+function createParser(
+  res: AsyncGenerator<any>,
+  data: experimental_StreamData,
+  opts?: ParserOptions,
+) {
   const trimStartOfStream = trimStartOfStreamHelper();
   return new ReadableStream<string>({
+    start() {
+      // if image_url is provided, send it via the data stream
+      if (opts?.image_url) {
+        const message: JSONValue = {
+          type: "image_url",
+          image_url: {
+            url: opts.image_url,
+          },
+        };
+        data.append(message);
+      } else {
+        data.append({}); // send an empty image response for the user's message
+      }
+    },
     async pull(controller): Promise<void> {
       const { value, done } = await res.next();
       if (done) {
         controller.close();
+        data.append({}); // send an empty image response for the assistant's message
+        data.close();
         return;
       }

@@ -25,11 +51,16 @@
 export function LlamaIndexStream(
   res: AsyncGenerator<any>,
-  callbacks?: AIStreamCallbacksAndOptions,
-): ReadableStream {
-  return createParser(res)
-    .pipeThrough(createCallbacksTransformer(callbacks))
-    .pipeThrough(
-      createStreamDataTransformer(callbacks?.experimental_streamData),
-    );
+  opts?: {
+    callbacks?: AIStreamCallbacksAndOptions;
+    parserOptions?: ParserOptions;
+  },
+): { stream: ReadableStream; data: experimental_StreamData } {
+  const data = new experimental_StreamData();
+  return {
+    stream: createParser(res, data, opts?.parserOptions)
+      .pipeThrough(createCallbacksTransformer(opts?.callbacks))
+      .pipeThrough(createStreamDataTransformer(true)),
+    data,
+  };
 }
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts
index ff00a3894..a4a9f30b7 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts
@@ -1,5 +1,5 @@
 import { Message, StreamingTextResponse } from "ai";
-import { MessageContent, OpenAI } from "llamaindex";
+import { ChatMessage, MessageContent, OpenAI } from "llamaindex";
 import { NextRequest, NextResponse } from "next/server";
 import { createChatEngine } from "./engine";
 import { LlamaIndexStream } from "./llamaindex-stream";
@@ -42,7 +42,7 @@ export async function POST(request: NextRequest) {
   }

   const llm = new OpenAI({
-    model: process.env.MODEL || "gpt-3.5-turbo",
+    model: (process.env.MODEL as any) ?? "gpt-3.5-turbo",
     maxTokens: 512,
   });

@@ -55,15 +55,19 @@

     const response = await chatEngine.chat(
       lastMessageContent as MessageContent,
-      messages,
+      messages as ChatMessage[],
       true,
     );

     // Transform the response into a readable stream
-    const stream = LlamaIndexStream(response);
+    const { stream, data: streamData } = LlamaIndexStream(response, {
+      parserOptions: {
+        image_url: data?.imageUrl,
+      },
+    });

     // Return a StreamingTextResponse, which can be consumed by the client
-    return new StreamingTextResponse(stream);
+    return new StreamingTextResponse(stream, {}, streamData);
   } catch (error) {
     console.error("[LlamaIndex]", error);
     return NextResponse.json(
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx
index e51eeef32..08afc2548 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx
@@ -1,6 +1,8 @@
 "use client";

 import { useChat } from "ai/react";
+import { useMemo } from "react";
+import { insertDataIntoMessages } from "./transform";
 import { ChatInput, ChatMessages } from "./ui/chat";

 export default function ChatSection() {
@@ -12,6 +14,7 @@
     handleInputChange,
     reload,
     stop,
+    data,
   } = useChat({
     api: process.env.NEXT_PUBLIC_CHAT_API,
     headers: {
@@ -19,10 +22,14 @@
     },
   });

+  const transformedMessages = useMemo(() => {
+    return insertDataIntoMessages(messages, data);
+  }, [messages, data]);
+
   return (
     <div className="space-y-4 max-w-5xl w-full">
       <ChatMessages
-        messages={messages}
+        messages={transformedMessages}
         isLoading={isLoading}
         reload={reload}
         stop={stop}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/transform.ts b/packages/create-llama/templates/types/streaming/nextjs/app/components/transform.ts
new file mode 100644
index 000000000..5af8fb3cb
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/transform.ts
@@ -0,0 +1,19 @@
+import { JSONValue, Message } from "ai";
+
+export const isValidMessageData = (rawData: JSONValue | undefined) => {
+  if (!rawData || typeof rawData !== "object") return false;
+  if (Object.keys(rawData).length === 0) return false;
+  return true;
+};
+
+export const insertDataIntoMessages = (
+  messages: Message[],
+  data: JSONValue[] | undefined,
+) => {
+  if (!data) return messages;
+  messages.forEach((message, i) => {
+    const rawData = data[i];
+    if (isValidMessageData(rawData)) message.data = rawData;
+  });
+  return messages;
+};
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-message.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-message.tsx
index 9ada08a3d..808d9b080 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-message.tsx
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-message.tsx
@@ -1,18 +1,49 @@
 import { Check, Copy } from "lucide-react";

+import { JSONValue, Message } from "ai";
+import Image from "next/image";
 import { Button } from "../button";
 import ChatAvatar from "./chat-avatar";
-import { Message } from "./chat.interface";
 import Markdown from "./markdown";
 import { useCopyToClipboard } from "./use-copy-to-clipboard";

+interface ChatMessageImageData {
+  type: "image_url";
+  image_url: {
+    url: string;
+  };
+}
+
+// This component will parse message data and render the appropriate UI.
+function ChatMessageData({ messageData }: { messageData: JSONValue }) {
+  const { image_url, type } = messageData as unknown as ChatMessageImageData;
+  if (type === "image_url") {
+    return (
+      <div className="rounded-md max-w-[200px] shadow-md">
+        <Image
+          src={image_url.url}
+          width={0}
+          height={0}
+          sizes="100vw"
+          style={{ width: "100%", height: "auto" }}
+          alt=""
+        />
+      </div>
+    );
+  }
+  return null;
+}
+
 export default function ChatMessage(chatMessage: Message) {
   const { isCopied, copyToClipboard } = useCopyToClipboard({ timeout: 2000 });
   return (
     <div className="flex items-start gap-4 pr-5 pt-5">
       <ChatAvatar role={chatMessage.role} />
       <div className="group flex flex-1 justify-between gap-2">
-        <div className="flex-1">
+        <div className="flex-1 space-y-4">
+          {chatMessage.data && (
+            <ChatMessageData messageData={chatMessage.data} />
+          )}
           <Markdown content={chatMessage.content} />
         </div>
         <Button
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat.interface.ts b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat.interface.ts
index 584a63f73..5b9f22539 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat.interface.ts
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat.interface.ts
@@ -1,8 +1,4 @@
-export interface Message {
-  id: string;
-  content: string;
-  role: string;
-}
+import { Message } from "ai";

 export interface ChatHandler {
   messages: Message[];
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts
index c7990f9c1..112ef39a8 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts
@@ -1,5 +1,5 @@
 import ChatInput from "./chat-input";
 import ChatMessages from "./chat-messages";

-export { type ChatHandler, type Message } from "./chat.interface";
+export { type ChatHandler } from "./chat.interface";
 export { ChatInput, ChatMessages };
-- 
GitLab
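
Note on the data flow this patch sets up: the server-side LlamaIndexStream appends exactly one experimental_StreamData entry per message (an image_url payload, or an empty {} placeholder), and the client-side insertDataIntoMessages pairs each entry with the message at the same index, while isValidMessageData filters out the empty placeholders. A minimal TypeScript sketch of that pairing; the message contents and URL are hypothetical sample values, only insertDataIntoMessages comes from the patch:

import { JSONValue, Message } from "ai";
import { insertDataIntoMessages } from "./transform";

// Hypothetical conversation: the user attached an image, the assistant did not.
const messages: Message[] = [
  { id: "1", role: "user", content: "What is in this picture?" },
  { id: "2", role: "assistant", content: "It looks like a cat." },
];

// Entries streamed alongside the messages: index 0 belongs to the user
// message, index 1 to the assistant message. The empty object is the
// placeholder LlamaIndexStream appends to keep the indices aligned.
const data: JSONValue[] = [
  { type: "image_url", image_url: { url: "https://example.com/cat.jpg" } },
  {},
];

const transformed = insertDataIntoMessages(messages, data);
console.log(transformed[0].data); // { type: "image_url", ... } -> rendered by <ChatMessageData />
console.log(transformed[1].data); // undefined: the empty placeholder is filtered out

The empty placeholders are what keep message and data indices aligned when only some messages carry an image.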