Skip to content
Snippets Groups Projects
Commit a0fbcd4e authored by Marcus Schiesser
Browse files

add support for chat engines to express

parent b6f67d93
No related branches found
No related tags found
No related merge requests found
......@@ -239,30 +239,32 @@ async function run(): Promise<void> {
}
}
if (!program.engine) {
if (ciInfo.isCI) {
program.engine = getPrefOrDefault("engine");
} else {
const { engine } = await prompts(
{
type: "select",
name: "engine",
message: "Which chat engine would you like to use?",
choices: [
{ title: "SimpleChatEngine", value: "simple" },
{ title: "ContextChatEngine", value: "context" },
],
initial: 0,
},
{
onCancel: () => {
console.error("Exiting.");
process.exit(1);
if (program.framework === "express" || program.framework === "nextjs") {
if (!program.engine) {
if (ciInfo.isCI) {
program.engine = getPrefOrDefault("engine");
} else {
const { engine } = await prompts(
{
type: "select",
name: "engine",
message: "Which chat engine would you like to use?",
choices: [
{ title: "SimpleChatEngine", value: "simple" },
{ title: "ContextChatEngine", value: "context" },
],
initial: 0,
},
},
);
program.engine = engine;
preferences.engine = engine;
{
onCancel: () => {
console.error("Exiting.");
process.exit(1);
},
},
);
program.engine = engine;
preferences.engine = engine;
}
}
}
......
......@@ -56,20 +56,30 @@ export const installTemplate = async ({
/**
* Copy the selected chat engine files to the target directory and reference it.
*/
console.log("\nUsing chat engine:", engine, "\n");
const enginePath = path.join(__dirname, "engines", engine);
const engineDestPath = path.join(root, "app", "api", "chat", "engine");
await copy("**", engineDestPath, {
parents: true,
cwd: enginePath,
});
const routeFile = path.join(engineDestPath, "..", "route.ts");
const routeFileContent = await fs.readFile(routeFile, "utf8");
const newContent = routeFileContent.replace(
/^import { createChatEngine }.*$/m,
'import { createChatEngine } from "./engine"\n',
);
await fs.writeFile(routeFile, newContent);
let relativeEngineDestPath;
if (framework === "express" || framework === "nextjs") {
console.log("\nUsing chat engine:", engine, "\n");
const enginePath = path.join(__dirname, "engines", engine);
relativeEngineDestPath =
framework === "nextjs"
? path.join("app", "api", "chat")
: path.join("src", "controllers");
await copy("**", path.join(root, relativeEngineDestPath, "engine"), {
parents: true,
cwd: enginePath,
});
const routeFile = path.join(
root,
relativeEngineDestPath,
framework === "nextjs" ? "route.ts" : "llm.controller.ts",
);
const routeFileContent = await fs.readFile(routeFile, "utf8");
const newContent = routeFileContent.replace(
/^import { createChatEngine }.*$/m,
'import { createChatEngine } from "./engine"\n',
);
await fs.writeFile(routeFile, newContent);
}
/**
* Update the package.json scripts.
......@@ -86,11 +96,15 @@ export const installTemplate = async ({
llamaindex: version,
};
if (engine === "context") {
if (engine === "context" && relativeEngineDestPath) {
// add generate script if using context engine
packageJson.scripts = {
...packageJson.scripts,
generate: "node ./app/api/chat/engine/generate.mjs",
generate: `node ${path.join(
relativeEngineDestPath,
"engine",
"generate.mjs",
)}`,
};
}
......
import { ChatMessage, OpenAI, SimpleChatEngine } from "llamaindex";
import { NextFunction, Request, Response } from "express";
import { ChatMessage, OpenAI } from "llamaindex";
import { createChatEngine } from "../../../../engines/context";
/**
 * Express handler for POST /chat.
 *
 * Expects a JSON body of `{ message: string, chatHistory: ChatMessage[] }`,
 * runs the message through the chat engine created by `createChatEngine`
 * (the commit replaces the previously hard-coded `SimpleChatEngine`), and
 * responds with `{ result: ChatMessage }` containing the assistant's reply.
 *
 * Responses:
 *  - 400 when `message` or `chatHistory` is missing from the body
 *  - 200 with `{ result }` on success
 *  - 500 with `{ error }` when the LLM call (or anything else) throws
 */
export const chat = async (req: Request, res: Response, next: NextFunction) => {
  try {
    const {
      message,
      chatHistory,
    }: {
      message: string;
      chatHistory: ChatMessage[];
    } = req.body;
    if (!message || !chatHistory) {
      return res.status(400).json({
        error: "message, chatHistory are required in the request body",
      });
    }

    const llm = new OpenAI({
      model: "gpt-3.5-turbo",
    });

    // Engine is selected at scaffold time; the import is rewritten by
    // installTemplate to point at the copied "./engine" module.
    const chatEngine = await createChatEngine(llm);

    const response = await chatEngine.chat(message, chatHistory);
    const result: ChatMessage = {
      role: "assistant",
      content: response.response,
    };

    return res.status(200).json({
      result,
    });
  } catch (error) {
    console.error("[LlamaIndex]", error);
    return res.status(500).json({
      error: (error as Error).message,
    });
  }
};
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.