Commit e9678753 authored by Marcus Schiesser

added first draft of streaming nextjs template

parent 5e410785

Showing 406 additions and 0 deletions

# Rename this file to `.env.local` to use environment variables locally with `next dev`
# https://nextjs.org/docs/pages/building-your-application/configuring/environment-variables
MY_HOST="example.com"
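# Assumption (not in the original template): the chat route reads the OpenAI key from
# process.env.OPENAI_API_KEY, so it would typically be set in this file as well, e.g.:
# OPENAI_API_KEY=""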

templates/streaming/nextjs/README.md

This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Next.js](https://nextjs.org/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).

## Getting Started

First, install the dependencies:

```
npm install
```

Second, run the development server:

```
npm run dev
```
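
The chat API route calls OpenAI, so `next dev` also needs an API key. A minimal sketch, assuming the key is supplied via `OPENAI_API_KEY` in `.env.local` (this matches `process.env.OPENAI_API_KEY` in the bundled route handler):

```
OPENAI_API_KEY="<your OpenAI API key>"
```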

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.

This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.

## Learn More

To learn more about LlamaIndex, take a look at the following resources:

- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (TypeScript features).

You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!

templates/streaming/nextjs/app/api/chat/route.ts

import { OpenAIStream, StreamingTextResponse } from "ai";
import { NextRequest, NextResponse } from "next/server";
import OpenAI from "openai";

export const runtime = "nodejs";
export const dynamic = "force-dynamic";

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const { messages } = body;
    if (!messages) {
      return NextResponse.json(
        {
          error: "messages are required in the request body",
        },
        { status: 400 },
      );
    }

    // Draft of the non-streaming LlamaIndex chat engine variant, kept for reference:
    // const llm = new OpenAI({
    //   model: "gpt-3.5-turbo",
    // });
    // const chatEngine = new SimpleChatEngine({
    //   llm,
    // });
    // const response = await chatEngine.chat(message, chatHistory);
    // const result: ChatMessage = {
    //   role: "assistant",
    //   content: response.response,
    // };
    // return NextResponse.json({ result });

    const response = await openai.chat.completions.create({
      model: "gpt-4",
      stream: true,
      messages,
    });

    // Transform the response into a readable stream
    const stream = OpenAIStream(response);

    // Return a StreamingTextResponse, which can be consumed by the client
    return new StreamingTextResponse(stream);
  } catch (error) {
    console.error("[LlamaIndex]", error);
    return NextResponse.json(
      {
        error: (error as Error).message,
      },
      {
        status: 500,
      },
    );
  }
}
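
For reference, a minimal sketch of calling this endpoint without the `ai` React hooks, assuming the handler is served at `/api/chat` (the default path that `useChat` posts to). `StreamingTextResponse` sends the completion as a plain text stream, so the body can be read with a standard `ReadableStream` reader:

```
// Hypothetical standalone client for the streaming chat route.
// Assumption: the route handler above is mounted at /api/chat.
async function streamChat(prompt: string): Promise<string> {
  const res = await fetch("/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages: [{ role: "user", content: prompt }] }),
  });
  if (!res.ok || !res.body) {
    throw new Error(`Request failed with status ${res.status}`);
  }

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let text = "";

  // Read until the stream closes, accumulating the decoded chunks;
  // a UI would render each chunk incrementally instead.
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    text += decoder.decode(value, { stream: true });
  }
  return text;
}
```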
"use client";
import { Message } from "ai/react";
import Image from "next/image";
export default function ChatAvatar(chatMessage: Message) {
if (chatMessage.role === "user") {
return (
<div className="flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md border shadow bg-background">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 256 256"
fill="currentColor"
className="h-4 w-4"
>
<path d="M230.92 212c-15.23-26.33-38.7-45.21-66.09-54.16a72 72 0 1 0-73.66 0c-27.39 8.94-50.86 27.82-66.09 54.16a8 8 0 1 0 13.85 8c18.84-32.56 52.14-52 89.07-52s70.23 19.44 89.07 52a8 8 0 1 0 13.85-8ZM72 96a56 56 0 1 1 56 56 56.06 56.06 0 0 1-56-56Z"></path>
</svg>
</div>
);
}
return (
<div className="flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md border bg-black text-white">
<Image
className="rounded-md"
src="/llama.png"
alt="Llama Logo"
width={24}
height={24}
priority
/>
</div>
);
}
"use client";
import ChatItem from "@/app/components/chat-item";
import { Message } from "ai/react";
import { useEffect, useRef } from "react";
export default function ChatHistory({ messages }: { messages: Message[] }) {
const scrollableChatContainerRef = useRef<HTMLDivElement>(null);
const scrollToBottom = () => {
if (scrollableChatContainerRef.current) {
scrollableChatContainerRef.current.scrollTop =
scrollableChatContainerRef.current.scrollHeight;
}
};
useEffect(() => {
scrollToBottom();
}, [messages.length]);
return (
<div className="w-full max-w-5xl p-4 bg-white rounded-xl shadow-xl">
<div
className="flex flex-col gap-5 divide-y h-[50vh] overflow-auto"
ref={scrollableChatContainerRef}
>
{messages.map((m: Message) => (
<ChatItem key={m.id} {...m} />
))}
</div>
</div>
);
}
"use client";
import ChatAvatar from "@/app/components/chat-avatar";
import { Message } from "ai/react";
export default function ChatItem(chatMessage: Message) {
return (
<div className="flex items-start gap-4 pt-5">
<ChatAvatar {...chatMessage} />
<p className="break-words">{chatMessage.content}</p>
</div>
);
}
"use client";
import MessageForm from "@/app/components/message-form";
import { useChat } from "ai/react";
import ChatHistory from "./chat-history";
export default function ChatSection() {
const chat = useChat();
return (
<>
<ChatHistory messages={chat.messages} />
<MessageForm chat={chat} />
</>
);
}

templates/streaming/nextjs/app/components/header.tsx

import Image from "next/image";

export default function Header() {
  return (
    <div className="z-10 max-w-5xl w-full items-center justify-between font-mono text-sm lg:flex">
      <p className="fixed left-0 top-0 flex w-full justify-center border-b border-gray-300 bg-gradient-to-b from-zinc-200 pb-6 pt-8 backdrop-blur-2xl dark:border-neutral-800 dark:bg-zinc-800/30 dark:from-inherit lg:static lg:w-auto lg:rounded-xl lg:border lg:bg-gray-200 lg:p-4 lg:dark:bg-zinc-800/30">
        Get started by editing&nbsp;
        <code className="font-mono font-bold">app/page.tsx</code>
      </p>
      <div className="fixed bottom-0 left-0 flex h-48 w-full items-end justify-center bg-gradient-to-t from-white via-white dark:from-black dark:via-black lg:static lg:h-auto lg:w-auto lg:bg-none">
        <a
          href="https://www.llamaindex.ai/"
          className="flex items-center justify-center font-nunito text-lg font-bold gap-2"
        >
          <span>Built by LlamaIndex</span>
          <Image
            className="rounded-xl"
            src="/llama.png"
            alt="Llama Logo"
            width={40}
            height={40}
            priority
          />
        </a>
      </div>
    </div>
  );
}
"use client";
import { UseChatHelpers } from "ai/react";
export default function MessageForm({ chat }: { chat: UseChatHelpers }) {
return (
<>
<form
onSubmit={chat.handleSubmit}
className="flex items-start justify-between w-full max-w-5xl p-4 bg-white rounded-xl shadow-xl gap-4"
>
<input
autoFocus
name="message"
placeholder="Type a message"
className="w-full p-4 rounded-xl shadow-inner flex-1"
value={chat.input}
onChange={chat.handleInputChange}
/>
<button
type="submit"
className="p-4 text-white rounded-xl shadow-xl bg-gradient-to-r from-cyan-500 to-sky-500 disabled:opacity-50 disabled:cursor-not-allowed"
>
Send message
</button>
</form>
</>
);
}

templates/streaming/nextjs/app/favicon.ico (binary, 15 KiB)

templates/streaming/nextjs/app/globals.css

@tailwind base;
@tailwind components;
@tailwind utilities;

:root {
  --foreground-rgb: 0, 0, 0;
  --background-start-rgb: 214, 219, 220;
  --background-end-rgb: 255, 255, 255;
}

@media (prefers-color-scheme: dark) {
  :root {
    --foreground-rgb: 255, 255, 255;
    --background-start-rgb: 0, 0, 0;
    --background-end-rgb: 0, 0, 0;
  }
}

body {
  color: rgb(var(--foreground-rgb));
  background: linear-gradient(
      to bottom,
      transparent,
      rgb(var(--background-end-rgb))
    )
    rgb(var(--background-start-rgb));
}

.background-gradient {
  background-color: #fff;
  background-image: radial-gradient(
      at 21% 11%,
      rgba(186, 186, 233, 0.53) 0,
      transparent 50%
    ),
    radial-gradient(at 85% 0, hsla(46, 57%, 78%, 0.52) 0, transparent 50%),
    radial-gradient(at 91% 36%, rgba(194, 213, 255, 0.68) 0, transparent 50%),
    radial-gradient(at 8% 40%, rgba(251, 218, 239, 0.46) 0, transparent 50%);
}

templates/streaming/nextjs/app/layout.tsx

import type { Metadata } from "next";
import { Inter } from "next/font/google";
import "./globals.css";

const inter = Inter({ subsets: ["latin"] });

export const metadata: Metadata = {
  title: "Create Llama App",
  description: "Generated by create-llama",
};

export default function RootLayout({
  children,
}: {
  children: React.ReactNode;
}) {
  return (
    <html lang="en">
      <body className={inter.className}>{children}</body>
    </html>
  );
}

templates/streaming/nextjs/app/page.tsx

import Header from "@/app/components/header";
import ChatSection from "./components/chat-section";

export default function Home() {
  return (
    <main className="flex min-h-screen flex-col items-center gap-10 p-24 background-gradient">
      <Header />
      <ChatSection />
    </main>
  );
}

templates/streaming/nextjs/.eslintrc.json

{
  "extends": "next/core-web-vitals"
}

templates/streaming/nextjs/.gitignore

# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts

templates/streaming/nextjs/next-env.d.ts

/// <reference types="next" />
/// <reference types="next/image-types/global" />

// NOTE: This file should not be edited
// see https://nextjs.org/docs/basic-features/typescript for more information.

templates/streaming/nextjs/next.config.js

/** @type {import('next').NextConfig} */
const nextConfig = {
  experimental: {
    // Opt llamaindex out of Server Components bundling so it is loaded
    // with native Node.js require at runtime.
    serverComponentsExternalPackages: ["llamaindex"],
  },
}

module.exports = nextConfig

templates/streaming/nextjs/package.json

{
  "name": "llama-index-nextjs-streaming",
  "version": "1.0.0",
  "scripts": {
    "dev": "next dev",
    "build": "next build",
    "start": "next start",
    "lint": "next lint"
  },
  "dependencies": {
    "ai": "^2",
    "llamaindex": "0.0.31",
    "next": "^13",
    "openai": "^4.14.0",
    "react": "^18",
    "react-dom": "^18"
  },
  "devDependencies": {
    "@types/node": "^20",
    "@types/react": "^18",
    "@types/react-dom": "^18",
    "autoprefixer": "^10",
    "eslint": "^8",
    "eslint-config-next": "^13",
    "postcss": "^8",
    "tailwindcss": "^3",
    "typescript": "^5"
  }
}

templates/streaming/nextjs/postcss.config.js

module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}

templates/streaming/nextjs/public/llama.png (binary, 36.1 KiB)