From 5463d3bf4b840d11356bc5076efbc4fd24387a42 Mon Sep 17 00:00:00 2001
From: Marcus Schiesser <mail@marcusschiesser.de>
Date: Fri, 12 Apr 2024 17:16:25 +0800
Subject: [PATCH] fix: nextjs type checks

---
 templates/types/streaming/nextjs/app/api/chat/route.ts | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/templates/types/streaming/nextjs/app/api/chat/route.ts b/templates/types/streaming/nextjs/app/api/chat/route.ts
index 92e2f3b4..92e874bb 100644
--- a/templates/types/streaming/nextjs/app/api/chat/route.ts
+++ b/templates/types/streaming/nextjs/app/api/chat/route.ts
@@ -1,5 +1,5 @@
 import { initObservability } from "@/app/observability";
-import { StreamingTextResponse } from "ai";
+import { Message, StreamingTextResponse } from "ai";
 import { ChatMessage, MessageContent } from "llamaindex";
 import { NextRequest, NextResponse } from "next/server";
 import { createChatEngine } from "./engine/chat";
@@ -34,7 +34,7 @@ const convertMessageContent = (
 export async function POST(request: NextRequest) {
   try {
     const body = await request.json();
-    const { messages, data }: { messages: ChatMessage[]; data: any } = body;
+    const { messages, data }: { messages: Message[]; data: any } = body;
     const userMessage = messages.pop();
     if (!messages || !userMessage || userMessage.role !== "user") {
       return NextResponse.json(
@@ -57,7 +57,7 @@ export async function POST(request: NextRequest) {
     // Calling LlamaIndex's ChatEngine to get a streamed response
     const response = await chatEngine.chat({
       message: userMessageContent,
-      chatHistory: messages,
+      chatHistory: messages as ChatMessage[],
       stream: true,
     });
-- 
GitLab
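
Editor's note (not part of the upstream patch): the change types the incoming request body with the Vercel AI SDK's Message and narrows to LlamaIndex's ChatMessage only where the chat engine needs it. Below is a minimal sketch of an explicit conversion helper as an alternative to the cast, assuming simplified shapes of both types; the helper name toChatHistory is hypothetical and not part of either library.

import type { Message } from "ai";
import type { ChatMessage } from "llamaindex";

// Hypothetical helper: map the wire-format messages from the "ai" package
// to the shape LlamaIndex's chat engine expects, keeping only role/content.
function toChatHistory(messages: Message[]): ChatMessage[] {
  return messages.map((m) => ({
    role: m.role,
    content: m.content,
  })) as unknown as ChatMessage[];
}

The cast used in the patch is shorter and avoids copying the array; an explicit mapping like the sketch above is only worth it if the two message shapes drift further apart.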