diff --git a/packages/create-llama/templates/streaming/nextjs/.env.example b/packages/create-llama/templates/streaming/nextjs/.env.example
new file mode 100644
index 0000000000000000000000000000000000000000..7ac0a01551a65a68003c2615d510269b5d6a77f6
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/.env.example
@@ -0,0 +1,4 @@
+# Rename this file to `.env.local` to use environment variables locally with `next dev`
+# https://nextjs.org/docs/pages/building-your-application/configuring/environment-variables
+# OPENAI_API_KEY is read by the chat API route (app/api/chat/route.ts)
+OPENAI_API_KEY=""
diff --git a/packages/create-llama/templates/streaming/nextjs/README-template.md b/packages/create-llama/templates/streaming/nextjs/README-template.md
new file mode 100644
index 0000000000000000000000000000000000000000..1509ded7c3be489d369b94d6d6a286d496f488d8
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/README-template.md
@@ -0,0 +1,32 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Next.js](https://nextjs.org/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+
+## Getting Started
+
+First, install the dependencies:
+
+```bash
+npm install
+```
+
+Second, copy `.env.example` to `.env.local` and set `OPENAI_API_KEY` to your OpenAI API key (the chat API route requires it).
+
+Third, run the development server:
+
+```bash
+npm run dev
+```
+
+Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
+
+You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
+
+This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
+- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (TypeScript features).
+
+You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
diff --git a/packages/create-llama/templates/streaming/nextjs/app/api/chat/route.ts b/packages/create-llama/templates/streaming/nextjs/app/api/chat/route.ts
new file mode 100644
index 0000000000000000000000000000000000000000..06432075c0b4a900cbbe6ee5c0785b3460f22d3b
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/api/chat/route.ts
@@ -0,0 +1,62 @@
+import { OpenAIStream, StreamingTextResponse } from "ai";
+import { NextRequest, NextResponse } from "next/server";
+import OpenAI from "openai";
+export const runtime = "nodejs";
+export const dynamic = "force-dynamic";
+
+const openai = new OpenAI({
+  apiKey: process.env.OPENAI_API_KEY,
+});
+
+export async function POST(request: NextRequest) {
+  try {
+    const body = await request.json();
+    const { messages } = body;
+    if (!messages) {
+      return NextResponse.json(
+        {
+          error: "messages are required in the request body",
+        },
+        { status: 400 },
+      );
+    }
+
+    // Alternatively, use a LlamaIndex chat engine instead of calling the
+    // OpenAI SDK directly. Note that `OpenAI` here would be the LlamaIndex
+    // LLM class from "llamaindex", not the client imported above, and that
+    // this variant returns a single JSON payload instead of a stream:
+    //
+    // import { OpenAI, SimpleChatEngine } from "llamaindex";
+    //
+    // const llm = new OpenAI({ model: "gpt-3.5-turbo" });
+    // const chatEngine = new SimpleChatEngine({ llm });
+    //
+    // const lastMessage = messages[messages.length - 1];
+    // const response = await chatEngine.chat(lastMessage.content, messages);
+    // return NextResponse.json({
+    //   result: { role: "assistant", content: response.response },
+    // });
+
+    const response = await openai.chat.completions.create({
+      model: "gpt-4",
+      stream: true,
+      messages,
+    });
+
+    // Transform the response into a readable stream
+    const stream = OpenAIStream(response);
+
+    // Return a StreamingTextResponse, which can be consumed by the client
+    return new StreamingTextResponse(stream);
+  } catch (error) {
+    console.error("[LlamaIndex]", error);
+    return NextResponse.json(
+      {
+        error: (error as Error).message,
+      },
+      {
+        status: 500,
+      },
+    );
+  }
+}
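Note: the `StreamingTextResponse` returned above streams plain text, so the endpoint can also be consumed without the `ai/react` hooks used elsewhere in this template. Below is a minimal client-side sketch using only standard Web APIs; `streamChat` is a hypothetical helper name, and it assumes the route is mounted at `/api/chat` as in this diff:

```ts
// Hypothetical helper: POSTs a message list to the streaming route and
// accumulates the plain-text chunks as they arrive.
async function streamChat(
  messages: { role: string; content: string }[],
): Promise<string> {
  const res = await fetch("/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages }),
  });
  if (!res.ok || !res.body) {
    throw new Error(`chat request failed with status ${res.status}`);
  }

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let text = "";
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    text += decoder.decode(value, { stream: true });
    // A real UI would re-render the partial assistant message here.
  }
  return text;
}
```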
diff --git a/packages/create-llama/templates/streaming/nextjs/app/components/chat-avatar.tsx b/packages/create-llama/templates/streaming/nextjs/app/components/chat-avatar.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..14c7b06f575ec4584334ace9ee0c5929160d98ce
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/components/chat-avatar.tsx
@@ -0,0 +1,34 @@
+"use client";
+
+import { Message } from "ai/react";
+import Image from "next/image";
+
+export default function ChatAvatar(chatMessage: Message) {
+  if (chatMessage.role === "user") {
+    return (
+      <div className="flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md border shadow bg-background">
+        <svg
+          xmlns="http://www.w3.org/2000/svg"
+          viewBox="0 0 256 256"
+          fill="currentColor"
+          className="h-4 w-4"
+        >
+          <path d="M230.92 212c-15.23-26.33-38.7-45.21-66.09-54.16a72 72 0 1 0-73.66 0c-27.39 8.94-50.86 27.82-66.09 54.16a8 8 0 1 0 13.85 8c18.84-32.56 52.14-52 89.07-52s70.23 19.44 89.07 52a8 8 0 1 0 13.85-8ZM72 96a56 56 0 1 1 56 56 56.06 56.06 0 0 1-56-56Z"></path>
+        </svg>
+      </div>
+    );
+  }
+
+  return (
+    <div className="flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md border  bg-black text-white">
+      <Image
+        className="rounded-md"
+        src="/llama.png"
+        alt="Llama Logo"
+        width={24}
+        height={24}
+        priority
+      />
+    </div>
+  );
+}
diff --git a/packages/create-llama/templates/streaming/nextjs/app/components/chat-history.tsx b/packages/create-llama/templates/streaming/nextjs/app/components/chat-history.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..80f5e97d9f94290c4e3b6a83835954d3f6a71c9d
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/components/chat-history.tsx
@@ -0,0 +1,33 @@
+"use client";
+
+import ChatItem from "@/app/components/chat-item";
+import { Message } from "ai/react";
+import { useEffect, useRef } from "react";
+
+export default function ChatHistory({ messages }: { messages: Message[] }) {
+  const scrollableChatContainerRef = useRef<HTMLDivElement>(null);
+
+  const scrollToBottom = () => {
+    if (scrollableChatContainerRef.current) {
+      scrollableChatContainerRef.current.scrollTop =
+        scrollableChatContainerRef.current.scrollHeight;
+    }
+  };
+
+  useEffect(() => {
+    scrollToBottom();
+  }, [messages.length]);
+
+  return (
+    <div className="w-full max-w-5xl p-4 bg-white rounded-xl shadow-xl">
+      <div
+        className="flex flex-col gap-5 divide-y h-[50vh] overflow-auto"
+        ref={scrollableChatContainerRef}
+      >
+        {messages.map((m: Message) => (
+          <ChatItem key={m.id} {...m} />
+        ))}
+      </div>
+    </div>
+  );
+}
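One small note on the scroll behavior above: assigning `scrollTop = scrollHeight` jumps to the bottom instantly. If a smooth scroll is preferred, the standard `Element.scrollTo` API supports it; a sketch (the helper name is ours, not part of the template):

```ts
// Hypothetical smooth-scrolling variant of scrollToBottom; could be called
// from the same useEffect with scrollableChatContainerRef.current.
function scrollToBottomSmooth(el: HTMLElement | null): void {
  if (!el) return;
  el.scrollTo({ top: el.scrollHeight, behavior: "smooth" });
}
```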
diff --git a/packages/create-llama/templates/streaming/nextjs/app/components/chat-item.tsx b/packages/create-llama/templates/streaming/nextjs/app/components/chat-item.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..dc24fa0af5474aff96482c7a72f3ebdea8ffe94d
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/components/chat-item.tsx
@@ -0,0 +1,13 @@
+"use client";
+
+import ChatAvatar from "@/app/components/chat-avatar";
+import { Message } from "ai/react";
+
+export default function ChatItem(chatMessage: Message) {
+  return (
+    <div className="flex items-start gap-4 pt-5">
+      <ChatAvatar {...chatMessage} />
+      <p className="break-words">{chatMessage.content}</p>
+    </div>
+  );
+}
diff --git a/packages/create-llama/templates/streaming/nextjs/app/components/chat-section.tsx b/packages/create-llama/templates/streaming/nextjs/app/components/chat-section.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..3e0b993a5e95a8bc8214c1441eac316dfbb2a7d7
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/components/chat-section.tsx
@@ -0,0 +1,16 @@
+"use client";
+
+import MessageForm from "@/app/components/message-form";
+import { useChat } from "ai/react";
+import ChatHistory from "./chat-history";
+
+export default function ChatSection() {
+  const chat = useChat();
+
+  return (
+    <>
+      <ChatHistory messages={chat.messages} />
+      <MessageForm chat={chat} />
+    </>
+  );
+}
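`useChat()` is called with no arguments here because the hook defaults to posting to `/api/chat`, which matches the route added in this diff. A sketch of passing options explicitly, assuming the `ai` v2 option names; the values are illustrative, not required by the template:

```tsx
"use client";

import { useChat } from "ai/react";

// Illustrative only: the same hook with its endpoint and an initial
// assistant message spelled out rather than left to defaults.
export function ConfiguredChatSection() {
  const chat = useChat({
    api: "/api/chat", // the route defined in app/api/chat/route.ts
    initialMessages: [
      { id: "welcome", role: "assistant", content: "Hi! How can I help?" },
    ],
  });

  return <pre>{JSON.stringify(chat.messages, null, 2)}</pre>;
}
```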
diff --git a/packages/create-llama/templates/streaming/nextjs/app/components/header.tsx b/packages/create-llama/templates/streaming/nextjs/app/components/header.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..2b0e488f769eff6700a282c2a6a77dd8d0a4dac8
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/components/header.tsx
@@ -0,0 +1,28 @@
+import Image from "next/image";
+
+export default function Header() {
+  return (
+    <div className="z-10 max-w-5xl w-full items-center justify-between font-mono text-sm lg:flex">
+      <p className="fixed left-0 top-0 flex w-full justify-center border-b border-gray-300 bg-gradient-to-b from-zinc-200 pb-6 pt-8 backdrop-blur-2xl dark:border-neutral-800 dark:bg-zinc-800/30 dark:from-inherit lg:static lg:w-auto  lg:rounded-xl lg:border lg:bg-gray-200 lg:p-4 lg:dark:bg-zinc-800/30">
+        Get started by editing&nbsp;
+        <code className="font-mono font-bold">app/page.tsx</code>
+      </p>
+      <div className="fixed bottom-0 left-0 flex h-48 w-full items-end justify-center bg-gradient-to-t from-white via-white dark:from-black dark:via-black lg:static lg:h-auto lg:w-auto lg:bg-none">
+        <a
+          href="https://www.llamaindex.ai/"
+          className="flex items-center justify-center font-nunito text-lg font-bold gap-2"
+        >
+          <span>Built by LlamaIndex</span>
+          <Image
+            className="rounded-xl"
+            src="/llama.png"
+            alt="Llama Logo"
+            width={40}
+            height={40}
+            priority
+          />
+        </a>
+      </div>
+    </div>
+  );
+}
diff --git a/packages/create-llama/templates/streaming/nextjs/app/components/message-form.tsx b/packages/create-llama/templates/streaming/nextjs/app/components/message-form.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..ffd89762b56c5124b46fe692c374da97799262db
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/components/message-form.tsx
@@ -0,0 +1,29 @@
+"use client";
+
+import { UseChatHelpers } from "ai/react";
+
+export default function MessageForm({ chat }: { chat: UseChatHelpers }) {
+  return (
+    <>
+      <form
+        onSubmit={chat.handleSubmit}
+        className="flex items-start justify-between w-full max-w-5xl p-4 bg-white rounded-xl shadow-xl gap-4"
+      >
+        <input
+          autoFocus
+          name="message"
+          placeholder="Type a message"
+          className="w-full p-4 rounded-xl shadow-inner flex-1"
+          value={chat.input}
+          onChange={chat.handleInputChange}
+        />
+        <button
+          type="submit"
+          className="p-4 text-white rounded-xl shadow-xl bg-gradient-to-r from-cyan-500 to-sky-500 disabled:opacity-50 disabled:cursor-not-allowed"
+        >
+          Send message
+        </button>
+      </form>
+    </>
+  );
+}
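The submit button above carries `disabled:` styles but is never actually disabled. A sketch of wiring that up with the `isLoading` flag exposed by `UseChatHelpers` in `ai` v2; this is a variant of the component for illustration, not a drop-in change to the template:

```tsx
"use client";

import { UseChatHelpers } from "ai/react";

// Variant of MessageForm that disables submission while a response is
// streaming, so the existing disabled: styles take effect.
export default function MessageFormWithLoading({
  chat,
}: {
  chat: UseChatHelpers;
}) {
  return (
    <form onSubmit={chat.handleSubmit} className="flex w-full max-w-5xl gap-4">
      <input
        autoFocus
        name="message"
        placeholder="Type a message"
        className="w-full p-4 rounded-xl shadow-inner flex-1"
        value={chat.input}
        onChange={chat.handleInputChange}
      />
      <button
        type="submit"
        disabled={chat.isLoading}
        className="p-4 text-white rounded-xl shadow-xl bg-gradient-to-r from-cyan-500 to-sky-500 disabled:opacity-50 disabled:cursor-not-allowed"
      >
        Send message
      </button>
    </form>
  );
}
```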
diff --git a/packages/create-llama/templates/streaming/nextjs/app/favicon.ico b/packages/create-llama/templates/streaming/nextjs/app/favicon.ico
new file mode 100644
index 0000000000000000000000000000000000000000..a1eaef62f2dfa895f1bbffc6595bb53d9604963e
Binary files /dev/null and b/packages/create-llama/templates/streaming/nextjs/app/favicon.ico differ
diff --git a/packages/create-llama/templates/streaming/nextjs/app/globals.css b/packages/create-llama/templates/streaming/nextjs/app/globals.css
new file mode 100644
index 0000000000000000000000000000000000000000..d85e2eec9ab40d8bc2d8cbad401f414ae8cd0ab2
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/globals.css
@@ -0,0 +1,39 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+
+:root {
+  --foreground-rgb: 0, 0, 0;
+  --background-start-rgb: 214, 219, 220;
+  --background-end-rgb: 255, 255, 255;
+}
+
+@media (prefers-color-scheme: dark) {
+  :root {
+    --foreground-rgb: 255, 255, 255;
+    --background-start-rgb: 0, 0, 0;
+    --background-end-rgb: 0, 0, 0;
+  }
+}
+
+body {
+  color: rgb(var(--foreground-rgb));
+  background: linear-gradient(
+      to bottom,
+      transparent,
+      rgb(var(--background-end-rgb))
+    )
+    rgb(var(--background-start-rgb));
+}
+
+.background-gradient {
+  background-color: #fff;
+  background-image: radial-gradient(
+      at 21% 11%,
+      rgba(186, 186, 233, 0.53) 0,
+      transparent 50%
+    ),
+    radial-gradient(at 85% 0, hsla(46, 57%, 78%, 0.52) 0, transparent 50%),
+    radial-gradient(at 91% 36%, rgba(194, 213, 255, 0.68) 0, transparent 50%),
+    radial-gradient(at 8% 40%, rgba(251, 218, 239, 0.46) 0, transparent 50%);
+}
diff --git a/packages/create-llama/templates/streaming/nextjs/app/layout.tsx b/packages/create-llama/templates/streaming/nextjs/app/layout.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..fb097706274bdfe4690e4953bba24d7acea0a021
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/layout.tsx
@@ -0,0 +1,22 @@
+import type { Metadata } from "next";
+import { Inter } from "next/font/google";
+import "./globals.css";
+
+const inter = Inter({ subsets: ["latin"] });
+
+export const metadata: Metadata = {
+  title: "Create Llama App",
+  description: "Generated by create-llama",
+};
+
+export default function RootLayout({
+  children,
+}: {
+  children: React.ReactNode;
+}) {
+  return (
+    <html lang="en">
+      <body className={inter.className}>{children}</body>
+    </html>
+  );
+}
diff --git a/packages/create-llama/templates/streaming/nextjs/app/page.tsx b/packages/create-llama/templates/streaming/nextjs/app/page.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..ef00262b4a80049c70d66d1ceaced4afaabfd587
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/app/page.tsx
@@ -0,0 +1,11 @@
+import Header from "@/app/components/header";
+import ChatSection from "./components/chat-section";
+
+export default function Home() {
+  return (
+    <main className="flex min-h-screen flex-col items-center gap-10 p-24 background-gradient">
+      <Header />
+      <ChatSection />
+    </main>
+  );
+}
diff --git a/packages/create-llama/templates/streaming/nextjs/eslintrc.json b/packages/create-llama/templates/streaming/nextjs/eslintrc.json
new file mode 100644
index 0000000000000000000000000000000000000000..bffb357a7122523ec94045523758c4b825b448ef
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/eslintrc.json
@@ -0,0 +1,3 @@
+{
+  "extends": "next/core-web-vitals"
+}
diff --git a/packages/create-llama/templates/streaming/nextjs/gitignore b/packages/create-llama/templates/streaming/nextjs/gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..8f322f0d8f49570a594b865ef8916c428a01afc1
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/gitignore
@@ -0,0 +1,35 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# testing
+/coverage
+
+# next.js
+/.next/
+/out/
+
+# production
+/build
+
+# misc
+.DS_Store
+*.pem
+
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# local env files
+.env*.local
+
+# vercel
+.vercel
+
+# typescript
+*.tsbuildinfo
+next-env.d.ts
diff --git a/packages/create-llama/templates/streaming/nextjs/next-env.d.ts b/packages/create-llama/templates/streaming/nextjs/next-env.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..4f11a03dc6cc37f2b5105c08f2e7b24c603ab2f4
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/next-env.d.ts
@@ -0,0 +1,5 @@
+/// <reference types="next" />
+/// <reference types="next/image-types/global" />
+
+// NOTE: This file should not be edited
+// see https://nextjs.org/docs/basic-features/typescript for more information.
diff --git a/packages/create-llama/templates/streaming/nextjs/next.config.js b/packages/create-llama/templates/streaming/nextjs/next.config.js
new file mode 100644
index 0000000000000000000000000000000000000000..0b2c2bf17365d895255e161ffaf5714689d2d496
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/next.config.js
@@ -0,0 +1,8 @@
+/** @type {import('next').NextConfig} */
+const nextConfig = {
+  experimental: {
+    serverComponentsExternalPackages: ["llamaindex"],
+  },
+};
+
+module.exports = nextConfig;
diff --git a/packages/create-llama/templates/streaming/nextjs/package.json b/packages/create-llama/templates/streaming/nextjs/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..399c20bcb709e92da9a7e7fbc4d2267bb0937030
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/package.json
@@ -0,0 +1,29 @@
+{
+  "name": "llama-index-nextjs-streaming",
+  "version": "1.0.0",
+  "scripts": {
+    "dev": "next dev",
+    "build": "next build",
+    "start": "next start",
+    "lint": "next lint"
+  },
+  "dependencies": {
+    "ai": "^2",
+    "llamaindex": "0.0.31",
+    "next": "^13",
+    "openai": "^4.14.0",
+    "react": "^18",
+    "react-dom": "^18"
+  },
+  "devDependencies": {
+    "@types/node": "^20",
+    "@types/react": "^18",
+    "@types/react-dom": "^18",
+    "autoprefixer": "^10",
+    "eslint": "^8",
+    "eslint-config-next": "^13",
+    "postcss": "^8",
+    "tailwindcss": "^3",
+    "typescript": "^5"
+  }
+}
\ No newline at end of file
diff --git a/packages/create-llama/templates/streaming/nextjs/postcss.config.js b/packages/create-llama/templates/streaming/nextjs/postcss.config.js
new file mode 100644
index 0000000000000000000000000000000000000000..33ad091d26d8a9dc95ebdf616e217d985ec215b8
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/postcss.config.js
@@ -0,0 +1,6 @@
+module.exports = {
+  plugins: {
+    tailwindcss: {},
+    autoprefixer: {},
+  },
+}
diff --git a/packages/create-llama/templates/streaming/nextjs/public/llama.png b/packages/create-llama/templates/streaming/nextjs/public/llama.png
new file mode 100644
index 0000000000000000000000000000000000000000..d4efba3b816bf765439c6d01b322b02684e946c3
Binary files /dev/null and b/packages/create-llama/templates/streaming/nextjs/public/llama.png differ
diff --git a/packages/create-llama/templates/streaming/nextjs/tailwind.config.ts b/packages/create-llama/templates/streaming/nextjs/tailwind.config.ts
new file mode 100644
index 0000000000000000000000000000000000000000..7e4bd91a03437328466a264489ce47e107635565
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/tailwind.config.ts
@@ -0,0 +1,20 @@
+import type { Config } from "tailwindcss";
+
+const config: Config = {
+  content: [
+    "./pages/**/*.{js,ts,jsx,tsx,mdx}",
+    "./components/**/*.{js,ts,jsx,tsx,mdx}",
+    "./app/**/*.{js,ts,jsx,tsx,mdx}",
+  ],
+  theme: {
+    extend: {
+      backgroundImage: {
+        "gradient-radial": "radial-gradient(var(--tw-gradient-stops))",
+        "gradient-conic":
+          "conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))",
+      },
+    },
+  },
+  plugins: [],
+};
+export default config;
diff --git a/packages/create-llama/templates/streaming/nextjs/tsconfig.json b/packages/create-llama/templates/streaming/nextjs/tsconfig.json
new file mode 100644
index 0000000000000000000000000000000000000000..c7146963787144d4861e149d8d233049b7daefc7
--- /dev/null
+++ b/packages/create-llama/templates/streaming/nextjs/tsconfig.json
@@ -0,0 +1,27 @@
+{
+  "compilerOptions": {
+    "target": "es5",
+    "lib": ["dom", "dom.iterable", "esnext"],
+    "allowJs": true,
+    "skipLibCheck": true,
+    "strict": true,
+    "noEmit": true,
+    "esModuleInterop": true,
+    "module": "esnext",
+    "moduleResolution": "bundler",
+    "resolveJsonModule": true,
+    "isolatedModules": true,
+    "jsx": "preserve",
+    "incremental": true,
+    "plugins": [
+      {
+        "name": "next"
+      }
+    ],
+    "paths": {
+      "@/*": ["./*"]
+    }
+  },
+  "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
+  "exclude": ["node_modules"]
+}