Skip to content
Snippets Groups Projects
Commit 0f9765a2 authored by Thuc Pham, committed by GitHub
Browse files

feat: support showing image for chat message in NextJS (#368)

parent 8462e81b
Branches
Tags
No related merge requests found
......@@ -162,6 +162,10 @@ export const installTemplate = async (
props.openAiKey,
props.vectorDb,
);
} else {
// this is a frontend for a full-stack app, create .env file with model information
const content = `MODEL=${props.model}\nNEXT_PUBLIC_MODEL=${props.model}\n`;
await fs.writeFile(path.join(props.root, ".env"), content);
}
};
......
import {
JSONValue,
createCallbacksTransformer,
createStreamDataTransformer,
experimental_StreamData,
trimStartOfStreamHelper,
type AIStreamCallbacksAndOptions,
} from "ai";
function createParser(res: AsyncGenerator<any>) {
// Options forwarded to the stream parser. When image_url is set, the
// parser appends it to the data stream (see createParser's start()) so
// the client can render the image attached to the user's message.
type ParserOptions = {
  image_url?: string;
};
function createParser(
res: AsyncGenerator<any>,
data: experimental_StreamData,
opts?: ParserOptions,
) {
const trimStartOfStream = trimStartOfStreamHelper();
return new ReadableStream<string>({
start() {
// if image_url is provided, send it via the data stream
if (opts?.image_url) {
const message: JSONValue = {
type: "image_url",
image_url: {
url: opts.image_url,
},
};
data.append(message);
} else {
data.append({}); // send an empty image response for the user's message
}
},
async pull(controller): Promise<void> {
const { value, done } = await res.next();
if (done) {
controller.close();
data.append({}); // send an empty image response for the assistant's message
data.close();
return;
}
......@@ -25,11 +51,16 @@ function createParser(res: AsyncGenerator<any>) {
export function LlamaIndexStream(
res: AsyncGenerator<any>,
callbacks?: AIStreamCallbacksAndOptions,
): ReadableStream {
return createParser(res)
.pipeThrough(createCallbacksTransformer(callbacks))
.pipeThrough(
createStreamDataTransformer(callbacks?.experimental_streamData),
);
opts?: {
callbacks?: AIStreamCallbacksAndOptions;
parserOptions?: ParserOptions;
},
): { stream: ReadableStream; data: experimental_StreamData } {
const data = new experimental_StreamData();
return {
stream: createParser(res, data, opts?.parserOptions)
.pipeThrough(createCallbacksTransformer(opts?.callbacks))
.pipeThrough(createStreamDataTransformer(true)),
data,
};
}
import { Message, StreamingTextResponse } from "ai";
import { MessageContent, OpenAI } from "llamaindex";
import { ChatMessage, MessageContent, OpenAI } from "llamaindex";
import { NextRequest, NextResponse } from "next/server";
import { createChatEngine } from "./engine";
import { LlamaIndexStream } from "./llamaindex-stream";
......@@ -42,7 +42,7 @@ export async function POST(request: NextRequest) {
}
const llm = new OpenAI({
model: process.env.MODEL || "gpt-3.5-turbo",
model: (process.env.MODEL as any) ?? "gpt-3.5-turbo",
maxTokens: 512,
});
......@@ -55,15 +55,19 @@ export async function POST(request: NextRequest) {
const response = await chatEngine.chat(
lastMessageContent as MessageContent,
messages,
messages as ChatMessage[],
true,
);
// Transform the response into a readable stream
const stream = LlamaIndexStream(response);
const { stream, data: streamData } = LlamaIndexStream(response, {
parserOptions: {
image_url: data?.imageUrl,
},
});
// Return a StreamingTextResponse, which can be consumed by the client
return new StreamingTextResponse(stream);
return new StreamingTextResponse(stream, {}, streamData);
} catch (error) {
console.error("[LlamaIndex]", error);
return NextResponse.json(
......
"use client";
import { useChat } from "ai/react";
import { useMemo } from "react";
import { insertDataIntoMessages } from "./transform";
import { ChatInput, ChatMessages } from "./ui/chat";
export default function ChatSection() {
......@@ -12,6 +14,7 @@ export default function ChatSection() {
handleInputChange,
reload,
stop,
data,
} = useChat({
api: process.env.NEXT_PUBLIC_CHAT_API,
headers: {
......@@ -19,10 +22,14 @@ export default function ChatSection() {
},
});
const transformedMessages = useMemo(() => {
return insertDataIntoMessages(messages, data);
}, [messages, data]);
return (
<div className="space-y-4 max-w-5xl w-full">
<ChatMessages
messages={messages}
messages={transformedMessages}
isLoading={isLoading}
reload={reload}
stop={stop}
......
import { JSONValue, Message } from "ai";
// A data entry is renderable only if it is a non-null object with at
// least one key; the server appends `{}` placeholders ("empty image
// response") that must be skipped.
export const isValidMessageData = (rawData: JSONValue | undefined) => {
  if (!rawData || typeof rawData !== "object") return false;
  if (Object.keys(rawData).length === 0) return false;
  return true;
};

/**
 * Attach the i-th stream-data entry to the i-th message.
 *
 * Returns a new array with copied message objects instead of mutating
 * the input in place, so React referential change detection (e.g. the
 * useMemo consumer in ChatSection) sees fresh references when data
 * arrives. With no data, the original array is returned unchanged.
 */
export const insertDataIntoMessages = (
  messages: Message[],
  data: JSONValue[] | undefined,
): Message[] => {
  if (!data) return messages;
  return messages.map((message, i) => {
    const rawData = data[i];
    return isValidMessageData(rawData)
      ? { ...message, data: rawData }
      : message;
  });
};
import { Check, Copy } from "lucide-react";
import { JSONValue, Message } from "ai";
import Image from "next/image";
import { Button } from "../button";
import ChatAvatar from "./chat-avatar";
import { Message } from "./chat.interface";
import Markdown from "./markdown";
import { useCopyToClipboard } from "./use-copy-to-clipboard";
// Shape of the "image_url" payload carried on a message's `data` field
// via the AI SDK data stream; rendered by ChatMessageData below.
interface ChatMessageImageData {
  type: "image_url";
  image_url: {
    url: string;
  };
}
// This component will parse message data and render the appropriate UI.
function ChatMessageData({ messageData }: { messageData: JSONValue }) {
const { image_url, type } = messageData as unknown as ChatMessageImageData;
if (type === "image_url") {
return (
<div className="rounded-md max-w-[200px] shadow-md">
<Image
src={image_url.url}
width={0}
height={0}
sizes="100vw"
style={{ width: "100%", height: "auto" }}
alt=""
/>
</div>
);
}
return null;
}
export default function ChatMessage(chatMessage: Message) {
const { isCopied, copyToClipboard } = useCopyToClipboard({ timeout: 2000 });
return (
<div className="flex items-start gap-4 pr-5 pt-5">
<ChatAvatar role={chatMessage.role} />
<div className="group flex flex-1 justify-between gap-2">
<div className="flex-1">
<div className="flex-1 space-y-4">
{chatMessage.data && (
<ChatMessageData messageData={chatMessage.data} />
)}
<Markdown content={chatMessage.content} />
</div>
<Button
......
// Minimal chat message shape consumed by the UI components.
export interface Message {
  id: string;
  content: string;
  // NOTE(review): role is a free-form string here (e.g. "user",
  // "assistant") — confirm against the values producers actually send.
  role: string;
}
import { Message } from "ai";
export interface ChatHandler {
messages: Message[];
......
import ChatInput from "./chat-input";
import ChatMessages from "./chat-messages";

// Single re-export surface for the chat UI package. `Message` is no
// longer re-exported here: chat.interface imports it from "ai" instead
// of declaring it, so only the handler contract is forwarded. (The
// previous version re-exported ChatHandler twice, a compile error.)
export { type ChatHandler } from "./chat.interface";
export { ChatInput, ChatMessages };
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment