Commit 7c947022 authored by Thuc Pham, committed by Marcus Schiesser

Fix/express app can not parse request json body (#17)

parent c15e5532
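Express does not parse JSON request bodies by default, so without a body-parsing middleware req.body stays undefined and the previous JSON.parse(req.body) call in the chat controllers could not work. This commit registers the express.json() middleware, reads the already-parsed req.body directly, adds a getLastMessageContent helper so an optional data.imageUrl can be turned into multimodal MessageContent, and makes the Next.js chat client send an explicit Content-Type: application/json header.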
@@ -11,6 +11,8 @@ const env = process.env["NODE_ENV"];
 const isDevelopment = !env || env === "development";
 const prodCorsOrigin = process.env["PROD_CORS_ORIGIN"];
+app.use(express.json());
+
 if (isDevelopment) {
   console.warn("Running in development mode - allowing CORS for all origins");
   app.use(cors());
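For context: express.json() is what makes req.body available as a parsed object on JSON requests. A minimal sketch of the behaviour this middleware enables (the route path and port are illustrative, not taken from this repo):

import express from "express";

const app = express();

// Without this line, Express leaves req.body undefined for JSON requests.
app.use(express.json());

// Illustrative route: echoes the parsed body back to the caller.
app.post("/api/chat", (req, res) => {
  // req.body is already a plain object here; no JSON.parse needed.
  res.json({ received: req.body });
});

app.listen(3000);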
-import { NextFunction, Request, Response } from "express";
-import { ChatMessage, OpenAI } from "llamaindex";
+import { Request, Response } from "express";
+import { ChatMessage, MessageContent, OpenAI } from "llamaindex";
 import { MODEL } from "../../constants";
 import { createChatEngine } from "./engine";
 
-export const chat = async (req: Request, res: Response, next: NextFunction) => {
+const getLastMessageContent = (
+  textMessage: string,
+  imageUrl: string | undefined,
+): MessageContent => {
+  if (!imageUrl) return textMessage;
+  return [
+    {
+      type: "text",
+      text: textMessage,
+    },
+    {
+      type: "image_url",
+      image_url: {
+        url: imageUrl,
+      },
+    },
+  ];
+};
+
+export const chat = async (req: Request, res: Response) => {
   try {
-    const { messages }: { messages: ChatMessage[] } = JSON.parse(req.body);
+    const { messages, data }: { messages: ChatMessage[]; data: any } = req.body;
     const lastMessage = messages.pop();
     if (!messages || !lastMessage || lastMessage.role !== "user") {
       return res.status(400).json({
@@ -18,9 +37,17 @@ export const chat = async (req: Request, res: Response, next: NextFunction) => {
       model: MODEL,
     });
 
+    const lastMessageContent = getLastMessageContent(
+      lastMessage.content,
+      data?.imageUrl,
+    );
+
     const chatEngine = await createChatEngine(llm);
-    const response = await chatEngine.chat(lastMessage.content, messages);
+    const response = await chatEngine.chat(
+      lastMessageContent as MessageContent,
+      messages,
+    );
 
     const result: ChatMessage = {
       role: "assistant",
       content: response.response,
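The new getLastMessageContent helper is what adds basic multimodal support: with no image it returns the plain text string, and with data.imageUrl set it returns LlamaIndex's array-form MessageContent containing a text part and an image_url part. An illustration of the two shapes (the inputs are made up):

// Text-only request: the helper just passes the string through.
getLastMessageContent("Describe this picture", undefined);
// -> "Describe this picture"

// Request whose body carried { data: { imageUrl: "https://example.com/cat.png" } }:
getLastMessageContent("Describe this picture", "https://example.com/cat.png");
// -> [
//      { type: "text", text: "Describe this picture" },
//      { type: "image_url", image_url: { url: "https://example.com/cat.png" } },
//    ]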
@@ -11,6 +11,8 @@ const env = process.env["NODE_ENV"];
 const isDevelopment = !env || env === "development";
 const prodCorsOrigin = process.env["PROD_CORS_ORIGIN"];
+app.use(express.json());
+
 if (isDevelopment) {
   console.warn("Running in development mode - allowing CORS for all origins");
   app.use(cors());
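The identical express.json() hunk appears a second time because the commit applies the same fix to another copy of the Express app shipped in the repository; the controller that follows is the streaming variant of the chat endpoint.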
 import { streamToResponse } from "ai";
-import { NextFunction, Request, Response } from "express";
-import { ChatMessage, OpenAI } from "llamaindex";
+import { Request, Response } from "express";
+import { ChatMessage, MessageContent, OpenAI } from "llamaindex";
 import { MODEL } from "../../constants";
 import { createChatEngine } from "./engine";
 import { LlamaIndexStream } from "./llamaindex-stream";
 
-export const chat = async (req: Request, res: Response, next: NextFunction) => {
+const getLastMessageContent = (
+  textMessage: string,
+  imageUrl: string | undefined,
+): MessageContent => {
+  if (!imageUrl) return textMessage;
+  return [
+    {
+      type: "text",
+      text: textMessage,
+    },
+    {
+      type: "image_url",
+      image_url: {
+        url: imageUrl,
+      },
+    },
+  ];
+};
+
+export const chat = async (req: Request, res: Response) => {
   try {
-    const { messages }: { messages: ChatMessage[] } = JSON.parse(req.body);
+    const { messages, data }: { messages: ChatMessage[]; data: any } = req.body;
     const lastMessage = messages.pop();
     if (!messages || !lastMessage || lastMessage.role !== "user") {
       return res.status(400).json({
@@ -22,7 +41,16 @@ export const chat = async (req: Request, res: Response, next: NextFunction) => {
     const chatEngine = await createChatEngine(llm);
 
-    const response = await chatEngine.chat(lastMessage.content, messages, true);
+    const lastMessageContent = getLastMessageContent(
+      lastMessage.content,
+      data?.imageUrl,
+    );
+
+    const response = await chatEngine.chat(
+      lastMessageContent as MessageContent,
+      messages,
+      true,
+    );
 
     // Transform the response into a readable stream
     const stream = LlamaIndexStream(response);
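The lines truncated here presumably forward that stream to the Express response; given the streamToResponse import at the top of the file, that step would look roughly like this (a sketch, not the verbatim tail of the file):

// Pipe the ReadableStream produced by LlamaIndexStream into Express's res
// using the streamToResponse helper from the "ai" package.
return streamToResponse(stream, res);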
@@ -13,7 +13,12 @@ export default function ChatSection() {
     handleInputChange,
     reload,
     stop,
-  } = useChat({ api: process.env.NEXT_PUBLIC_CHAT_API });
+  } = useChat({
+    api: process.env.NEXT_PUBLIC_CHAT_API,
+    headers: {
+      "Content-Type": "application/json", // using JSON because of vercel/ai 2.2.26
+    },
+  });
 
   return (
     <div className="space-y-4 max-w-5xl w-full">
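On the wire, the useChat hook from the ai package POSTs a JSON body in the shape the Express controllers destructure; with the explicit Content-Type header, express.json() parses it on the server. Roughly (field values are illustrative):

// Illustrative request body sent by useChat to NEXT_PUBLIC_CHAT_API:
const body = {
  messages: [
    { role: "user", content: "Describe this picture" },
  ],
  // Optional extra payload; the controllers read data?.imageUrl from it.
  data: { imageUrl: "https://example.com/cat.png" },
};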