diff --git a/templates/types/streaming/nextjs/app/api/chat/route.ts b/templates/types/streaming/nextjs/app/api/chat/route.ts
index 92e2f3b4947cd729ed44c7dadd2687cc844c9d33..92e874bb0daca21518b581e2d67a7828b78c2b6b 100644
--- a/templates/types/streaming/nextjs/app/api/chat/route.ts
+++ b/templates/types/streaming/nextjs/app/api/chat/route.ts
@@ -1,5 +1,5 @@
 import { initObservability } from "@/app/observability";
-import { StreamingTextResponse } from "ai";
+import { Message, StreamingTextResponse } from "ai";
 import { ChatMessage, MessageContent } from "llamaindex";
 import { NextRequest, NextResponse } from "next/server";
 import { createChatEngine } from "./engine/chat";
@@ -34,7 +34,7 @@ const convertMessageContent = (
 export async function POST(request: NextRequest) {
   try {
     const body = await request.json();
-    const { messages, data }: { messages: ChatMessage[]; data: any } = body;
+    const { messages, data }: { messages: Message[]; data: any } = body;
     const userMessage = messages.pop();
     if (!messages || !userMessage || userMessage.role !== "user") {
       return NextResponse.json(
@@ -57,7 +57,7 @@ export async function POST(request: NextRequest) {
     // Calling LlamaIndex's ChatEngine to get a streamed response
     const response = await chatEngine.chat({
       message: userMessageContent,
-      chatHistory: messages,
+      chatHistory: messages as ChatMessage[],
       stream: true,
     });
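
Note (not part of the diff): the `messages as ChatMessage[]` cast relies on the Vercel AI SDK `Message` type and LlamaIndex's `ChatMessage` type overlapping on the `role` and `content` fields. A minimal sketch of what an explicit conversion could look like instead of the cast, assuming those two fields are all the chat engine reads; the helper name `toChatMessages` is hypothetical and not part of this change:

import type { Message } from "ai";
import type { ChatMessage } from "llamaindex";

// Hypothetical helper: map only the fields the chat engine is assumed to need.
const toChatMessages = (messages: Message[]): ChatMessage[] =>
  messages.map(
    (m): ChatMessage => ({
      // role values are assumed to be compatible between the two SDKs
      role: m.role as ChatMessage["role"],
      content: m.content,
    }),
  );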