Commit 35a39844 authored by Marcus Schiesser

inline simple chat engine as a default

parent b55ce8aa
Showing 32 additions and 18 deletions
@@ -71,17 +71,6 @@ export const installTemplate = async ({
     parents: true,
     cwd: enginePath,
   });
-  const routeFile = path.join(
-    root,
-    relativeEngineDestPath,
-    framework === "nextjs" ? "route.ts" : "chat.controller.ts",
-  );
-  const routeFileContent = await fs.readFile(routeFile, "utf8");
-  const newContent = routeFileContent.replace(
-    /^import { createChatEngine }.*$/m,
-    'import { createChatEngine } from "./engine"\n',
-  );
-  await fs.writeFile(routeFile, newContent);
 }

 /**
@@ -125,8 +114,10 @@ export const installTemplate = async ({
     customApiPath,
     "\n",
   );
+  // remove the default api folder
   const apiPath = path.join(root, "app", "api");
   await fs.rmdir(apiPath, { recursive: true });
+  // modify the dev script to use the custom api path
   packageJson.scripts = {
     ...packageJson.scripts,
     dev: `NEXT_PUBLIC_CHAT_API=${customApiPath} next dev`,
...
 import { NextFunction, Request, Response } from "express";
 import { ChatMessage, OpenAI } from "llamaindex";
-import { createChatEngine } from "../../../../engines/context";
+import { createChatEngine } from "./engine";

 export const chat = async (req: Request, res: Response, next: NextFunction) => {
   try {
...
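Before this commit, the installer rewrote this controller's import with the regex shown in the first hunk above; now each template ships its own ./engine module, so the import points at "./engine" directly. For orientation, here is a minimal sketch of what the body of such a non-streaming Express controller plausibly looks like; the request shape, model name, and error handling are assumptions, not code from this commit:

import { NextFunction, Request, Response } from "express";
import { ChatMessage, OpenAI } from "llamaindex";
import { createChatEngine } from "./engine";

export const chat = async (req: Request, res: Response, next: NextFunction) => {
  try {
    // Assumed request shape: { messages: ChatMessage[] } with the user's
    // latest message last.
    const { messages }: { messages: ChatMessage[] } = req.body;
    const lastMessage = messages.pop();
    if (!lastMessage || lastMessage.role !== "user") {
      return res.status(400).json({
        error: "messages are required and the last one must be from the user",
      });
    }
    const llm = new OpenAI({ model: "gpt-3.5-turbo" }); // model name assumed
    const chatEngine = await createChatEngine(llm);
    // Non-streaming call: latest message plus the remaining history.
    const response = await chatEngine.chat(lastMessage.content, messages);
    return res.status(200).json({ result: response });
  } catch (error) {
    next(error);
  }
};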
@@ -4,6 +4,7 @@
     "esModuleInterop": true,
     "forceConsistentCasingInFileNames": true,
     "strict": true,
-    "skipLibCheck": true
+    "skipLibCheck": true,
+    "moduleResolution": "node"
   }
 }
\ No newline at end of file
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+export async function createChatEngine(llm: LLM) {
+  return new SimpleChatEngine({
+    llm,
+  });
+}
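The inlined default is deliberately minimal: SimpleChatEngine forwards the conversation straight to the LLM with no retrieval step. A quick usage sketch (assumed, not part of this commit):

import { OpenAI } from "llamaindex";
import { createChatEngine } from "./engine";

const chatEngine = await createChatEngine(new OpenAI({ model: "gpt-3.5-turbo" }));
// chat() takes the new user message and optional prior history.
const response = await chatEngine.chat("What does this template do?");
console.log(response.toString());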
 import { ChatMessage, OpenAI } from "llamaindex";
 import { NextRequest, NextResponse } from "next/server";
-import { createChatEngine } from "../../../../../../engines/context";
+import { createChatEngine } from "./engine";

 export const runtime = "nodejs";
 export const dynamic = "force-dynamic";
...
 import { streamToResponse } from "ai";
 import { NextFunction, Request, Response } from "express";
 import { ChatMessage, OpenAI } from "llamaindex";
-import { createChatEngine } from "../../../../engines/context";
+import { createChatEngine } from "./engine";
 import { LlamaIndexStream } from "./llamaindex-stream";

 export const chat = async (req: Request, res: Response, next: NextFunction) => {
...
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+export async function createChatEngine(llm: LLM) {
+  return new SimpleChatEngine({
+    llm,
+  });
+}
@@ -4,6 +4,7 @@
     "esModuleInterop": true,
     "forceConsistentCasingInFileNames": true,
     "strict": true,
-    "skipLibCheck": true
+    "skipLibCheck": true,
+    "moduleResolution": "node"
   }
 }
\ No newline at end of file
+import { LLM, SimpleChatEngine } from "llamaindex";
+
+export async function createChatEngine(llm: LLM) {
+  return new SimpleChatEngine({
+    llm,
+  });
+}
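Because every template now owns its engine module, swapping the default for something smarter is a local edit. A hypothetical replacement (an assumption about what the removed shared "engines/context" module provided) that grounds answers in indexed documents:

import {
  ContextChatEngine,
  Document,
  LLM,
  VectorStoreIndex,
} from "llamaindex";

export async function createChatEngine(llm: LLM) {
  // Index the documents the chat should be grounded in (placeholder text).
  const index = await VectorStoreIndex.fromDocuments([
    new Document({ text: "Your domain knowledge goes here." }),
  ]);
  // Retrieve relevant chunks per message and let the LLM answer with them.
  return new ContextChatEngine({
    retriever: index.asRetriever(),
    chatModel: llm,
  });
}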
 import { Message, StreamingTextResponse } from "ai";
 import { OpenAI } from "llamaindex";
 import { NextRequest, NextResponse } from "next/server";
-import { createChatEngine } from "../../../../../../engines/context";
+import { createChatEngine } from "./engine";
 import { LlamaIndexStream } from "./llamaindex-stream";

 export const runtime = "nodejs";
...
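For completeness, a sketch of how this streaming Next.js route plausibly ties the engine to the Vercel AI SDK; only the imports appear in this diff, so the handler body below is an assumption:

import { Message, StreamingTextResponse } from "ai";
import { ChatMessage, OpenAI } from "llamaindex";
import { NextRequest, NextResponse } from "next/server";
import { createChatEngine } from "./engine";
import { LlamaIndexStream } from "./llamaindex-stream";

export const runtime = "nodejs";
export const dynamic = "force-dynamic";

export async function POST(request: NextRequest) {
  try {
    const { messages }: { messages: Message[] } = await request.json();
    const lastMessage = messages.pop();
    if (!lastMessage || lastMessage.role !== "user") {
      return NextResponse.json(
        { error: "messages are required and the last one must be from the user" },
        { status: 400 },
      );
    }
    const llm = new OpenAI({ model: "gpt-3.5-turbo" }); // model name assumed
    const chatEngine = await createChatEngine(llm);
    // Ask for a token stream; the ai-sdk Message shape is assumed close
    // enough to llamaindex's ChatMessage to cast.
    const response = await chatEngine.chat(
      lastMessage.content,
      messages as ChatMessage[],
      true,
    );
    return new StreamingTextResponse(LlamaIndexStream(response));
  } catch (error) {
    console.error("[LlamaIndex]", error);
    return NextResponse.json({ error: (error as Error).message }, { status: 500 });
  }
}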