diff --git a/.changeset/spicy-colts-dream.md b/.changeset/spicy-colts-dream.md
new file mode 100644
index 0000000000000000000000000000000000000000..392dca0f4080ff720d367d8d53adfc0fe3cebc8a
--- /dev/null
+++ b/.changeset/spicy-colts-dream.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+feat: support showing images in chat messages
diff --git a/packages/create-llama/helpers/index.ts b/packages/create-llama/helpers/index.ts
index e4f3ddfa00859b8ce7749dc6b2c8185af3ccf8ac..3f8dad54b473c1f5057b4be4ecc1895117e387a9 100644
--- a/packages/create-llama/helpers/index.ts
+++ b/packages/create-llama/helpers/index.ts
@@ -162,6 +162,10 @@ export const installTemplate = async (
       props.openAiKey,
       props.vectorDb,
     );
+  } else {
+    // this is the frontend of a full-stack app; create an .env file with the model information
+    const content = `MODEL=${props.model}\nNEXT_PUBLIC_MODEL=${props.model}\n`;
+    await fs.writeFile(path.join(props.root, ".env"), content);
   }
 };
 
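For context on the two variables written here: following Next.js conventions, `MODEL` is only visible to server-side code, while the `NEXT_PUBLIC_` prefix gets the value inlined into the client bundle so UI code can branch on it (e.g. to enable image upload only for vision-capable models). A minimal sketch of the assumed usage, not part of this diff:

```ts
// Server-side code (API routes) reads MODEL directly,
const serverModel = process.env.MODEL ?? "gpt-3.5-turbo";
// while client components can read the NEXT_PUBLIC_ variant, which Next.js
// inlines at build time.
const clientModel = process.env.NEXT_PUBLIC_MODEL;
```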
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts
index 12328de875d8a59a5501db7fbf2aa0d763a3dbaa..5ac376d63d83618ee22a9dcd42286a56bfe59620 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/llamaindex-stream.ts
@@ -1,17 +1,43 @@
 import {
+  JSONValue,
   createCallbacksTransformer,
   createStreamDataTransformer,
+  experimental_StreamData,
   trimStartOfStreamHelper,
   type AIStreamCallbacksAndOptions,
 } from "ai";
 
-function createParser(res: AsyncGenerator<any>) {
+type ParserOptions = {
+  image_url?: string;
+};
+
+function createParser(
+  res: AsyncGenerator<any>,
+  data: experimental_StreamData,
+  opts?: ParserOptions,
+) {
   const trimStartOfStream = trimStartOfStreamHelper();
   return new ReadableStream<string>({
+    start() {
+      // if image_url is provided, send it via the data stream
+      if (opts?.image_url) {
+        const message: JSONValue = {
+          type: "image_url",
+          image_url: {
+            url: opts.image_url,
+          },
+        };
+        data.append(message);
+      } else {
+        data.append({}); // append an empty entry so the data array stays aligned with the user's message
+      }
+    },
     async pull(controller): Promise<void> {
       const { value, done } = await res.next();
       if (done) {
         controller.close();
+        data.append({}); // append an empty entry to keep alignment for the assistant's message
+        data.close();
         return;
       }
 
@@ -25,11 +51,16 @@ function createParser(res: AsyncGenerator<any>) {
 
 export function LlamaIndexStream(
   res: AsyncGenerator<any>,
-  callbacks?: AIStreamCallbacksAndOptions,
-): ReadableStream {
-  return createParser(res)
-    .pipeThrough(createCallbacksTransformer(callbacks))
-    .pipeThrough(
-      createStreamDataTransformer(callbacks?.experimental_streamData),
-    );
+  opts?: {
+    callbacks?: AIStreamCallbacksAndOptions;
+    parserOptions?: ParserOptions;
+  },
+): { stream: ReadableStream; data: experimental_StreamData } {
+  const data = new experimental_StreamData();
+  return {
+    stream: createParser(res, data, opts?.parserOptions)
+      .pipeThrough(createCallbacksTransformer(opts?.callbacks))
+      .pipeThrough(createStreamDataTransformer(true)),
+    data,
+  };
 }
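The appends above implement a simple positional protocol: exactly one data entry is emitted per message, either an image payload or `{}` as a placeholder, so on the client `data[i]` lines up with `messages[i]` (see `transform.ts` below). A hedged sketch of what the data side of one round-trip contains, with a made-up URL:

```ts
import { JSONValue } from "ai";

// Illustrative only: entries appended to experimental_StreamData for a turn
// where the user attached an image and the assistant replied with plain text.
const appended: JSONValue[] = [
  { type: "image_url", image_url: { url: "https://example.com/cat.png" } }, // user message
  {}, // assistant message: empty placeholder keeps the arrays aligned
];
```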
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts
index ff00a3894e02727f3e2e6036127e29843bf0e122..a4a9f30b7549a92d1bbfd6ea8a0a99415f733f5e 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/api/chat/route.ts
@@ -1,5 +1,5 @@
 import { Message, StreamingTextResponse } from "ai";
-import { MessageContent, OpenAI } from "llamaindex";
+import { ChatMessage, MessageContent, OpenAI } from "llamaindex";
 import { NextRequest, NextResponse } from "next/server";
 import { createChatEngine } from "./engine";
 import { LlamaIndexStream } from "./llamaindex-stream";
@@ -42,7 +42,7 @@ export async function POST(request: NextRequest) {
     }
 
     const llm = new OpenAI({
-      model: process.env.MODEL || "gpt-3.5-turbo",
+      model: (process.env.MODEL as any) ?? "gpt-3.5-turbo", // env vars are plain strings, hence the cast
       maxTokens: 512,
     });
 
@@ -55,15 +55,19 @@ export async function POST(request: NextRequest) {
 
     const response = await chatEngine.chat(
       lastMessageContent as MessageContent,
-      messages,
+      messages as ChatMessage[],
       true,
     );
 
     // Transform the response into a readable stream
-    const stream = LlamaIndexStream(response);
+    const { stream, data: streamData } = LlamaIndexStream(response, {
+      parserOptions: {
+        image_url: data?.imageUrl,
+      },
+    });
 
     // Return a StreamingTextResponse, which can be consumed by the client
-    return new StreamingTextResponse(stream);
+    return new StreamingTextResponse(stream, {}, streamData);
   } catch (error) {
     console.error("[LlamaIndex]", error);
     return NextResponse.json(
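On the wire, the route expects the optional image to arrive in a `data` field of the JSON body (the handler's earlier, unchanged lines presumably destructure it, given the `data?.imageUrl` reference above). A minimal sketch of a matching request, with a placeholder URL:

```ts
// Sketch of the request shape this route accepts (assumed from data?.imageUrl):
const res = await fetch("/api/chat", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    messages: [{ role: "user", content: "What's in this picture?" }],
    data: { imageUrl: "https://example.com/photo.jpg" },
  }),
});
```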
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx
index e51eeef329741f7833b3119f9f44c6378921bb1d..08afc25487999ad9809aa01c9f95c726c39db374 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/chat-section.tsx
@@ -1,6 +1,8 @@
 "use client";
 
 import { useChat } from "ai/react";
+import { useMemo } from "react";
+import { insertDataIntoMessages } from "./transform";
 import { ChatInput, ChatMessages } from "./ui/chat";
 
 export default function ChatSection() {
@@ -12,6 +14,7 @@ export default function ChatSection() {
     handleInputChange,
     reload,
     stop,
+    data,
   } = useChat({
     api: process.env.NEXT_PUBLIC_CHAT_API,
     headers: {
@@ -19,10 +22,14 @@ export default function ChatSection() {
     },
   });
 
+  const transformedMessages = useMemo(() => {
+    return insertDataIntoMessages(messages, data);
+  }, [messages, data]);
+
   return (
     <div className="space-y-4 max-w-5xl w-full">
       <ChatMessages
-        messages={messages}
+        messages={transformedMessages}
         isLoading={isLoading}
         reload={reload}
         stop={stop}
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/transform.ts b/packages/create-llama/templates/types/streaming/nextjs/app/components/transform.ts
new file mode 100644
index 0000000000000000000000000000000000000000..5af8fb3cba4845331ecf16ca7c4bf223606e5644
--- /dev/null
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/transform.ts
@@ -0,0 +1,19 @@
+import { JSONValue, Message } from "ai";
+
+export const isValidMessageData = (rawData: JSONValue | undefined) => {
+  if (!rawData || typeof rawData !== "object") return false;
+  if (Object.keys(rawData).length === 0) return false;
+  return true;
+};
+
+export const insertDataIntoMessages = (
+  messages: Message[],
+  data: JSONValue[] | undefined,
+) => {
+  if (!data) return messages;
+  messages.forEach((message, i) => {
+    const rawData = data[i];
+    if (isValidMessageData(rawData)) message.data = rawData; // data[i] belongs to messages[i]
+  });
+  return messages;
+};
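A quick worked example of the positional matching (values are illustrative):

```ts
import { Message } from "ai";
import { insertDataIntoMessages } from "./transform";

const messages: Message[] = [
  { id: "1", role: "user", content: "What's in this picture?" },
  { id: "2", role: "assistant", content: "A cat on a sofa." },
];
// One data entry per message, as emitted by llamaindex-stream.ts:
const data = [
  { type: "image_url", image_url: { url: "https://example.com/cat.png" } },
  {},
];

const result = insertDataIntoMessages(messages, data);
// result[0].data is the image payload; result[1].data stays unset because the
// empty object fails the isValidMessageData check.
```

Note that the helper mutates the passed array in place and returns it; that works here because the `useMemo` in `chat-section.tsx` recomputes whenever `messages` or `data` change.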
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-message.tsx b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-message.tsx
index 9ada08a3d7498a204ef3403b2148007f302936bf..808d9b08019cd7cb0e8703fdf49c2cfd78625dbf 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-message.tsx
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat-message.tsx
@@ -1,18 +1,49 @@
 import { Check, Copy } from "lucide-react";
 
+import { JSONValue, Message } from "ai";
+import Image from "next/image";
 import { Button } from "../button";
 import ChatAvatar from "./chat-avatar";
-import { Message } from "./chat.interface";
 import Markdown from "./markdown";
 import { useCopyToClipboard } from "./use-copy-to-clipboard";
 
+interface ChatMessageImageData {
+  type: "image_url";
+  image_url: {
+    url: string;
+  };
+}
+
+// Parses a message's streamed data payload and renders the matching UI (currently only images).
+function ChatMessageData({ messageData }: { messageData: JSONValue }) {
+  const { image_url, type } = messageData as unknown as ChatMessageImageData;
+  if (type === "image_url") {
+    return (
+      <div className="rounded-md max-w-[200px] shadow-md">
+        <Image
+          src={image_url.url}
+          width={0}
+          height={0}
+          sizes="100vw"
+          style={{ width: "100%", height: "auto" }}
+          alt=""
+        />
+      </div>
+    );
+  }
+  return null;
+}
+
 export default function ChatMessage(chatMessage: Message) {
   const { isCopied, copyToClipboard } = useCopyToClipboard({ timeout: 2000 });
   return (
     <div className="flex items-start gap-4 pr-5 pt-5">
       <ChatAvatar role={chatMessage.role} />
       <div className="group flex flex-1 justify-between gap-2">
-        <div className="flex-1">
+        <div className="flex-1 space-y-4">
+          {chatMessage.data && (
+            <ChatMessageData messageData={chatMessage.data} />
+          )}
           <Markdown content={chatMessage.content} />
         </div>
         <Button
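One caveat worth flagging for this `next/image` usage: remote URLs must be allow-listed, or the image request is rejected at runtime. If `image_url.url` points at an external host, the template's `next.config.js` needs an entry along these lines (hostname is a placeholder):

```js
// next.config.js (sketch; replace the hostname with wherever the images live)
module.exports = {
  images: {
    remotePatterns: [{ protocol: "https", hostname: "example.com" }],
  },
};
```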
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat.interface.ts b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat.interface.ts
index 584a63f7333b86531148dd7849c3233c82fdbf16..5b9f22539c97428160f22c459125def2ad1b61fe 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat.interface.ts
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/chat.interface.ts
@@ -1,8 +1,4 @@
-export interface Message {
-  id: string;
-  content: string;
-  role: string;
-}
+import { Message } from "ai";
 
 export interface ChatHandler {
   messages: Message[];
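Swapping the local `Message` interface for the one exported by `ai` is what lets the `data` field assigned in `transform.ts` (and read in `chat-message.tsx`) type-check, since the SDK's message type carries the per-message data payload alongside `id`, `role`, and `content`.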
diff --git a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts
index c7990f9c13fff2f6cc17faf0a87c7d42f5326471..112ef39a8bdf7ef1f3ed3183eb7bc596ab596362 100644
--- a/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts
+++ b/packages/create-llama/templates/types/streaming/nextjs/app/components/ui/chat/index.ts
@@ -1,5 +1,5 @@
 import ChatInput from "./chat-input";
 import ChatMessages from "./chat-messages";
 
-export { type ChatHandler, type Message } from "./chat.interface";
+export { type ChatHandler } from "./chat.interface";
 export { ChatInput, ChatMessages };