Commit b8345f84 authored by Marcus Schiesser, committed by GitHub

feat: add /api/chat e2e test (uses openai key) (#287)


* feat: allow custom external port

---------

Co-authored-by: thucpn <thucsh2@gmail.com>
parent 77aeed39
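
The commit touches the generator (a new optional externalPort) and the e2e suite (a real /api/chat round trip when an OpenAI key is present). As a hedged illustration only (paths and values assumed, not taken from this commit), driving the built CLI non-interactively with the new flag could look like:

import { execSync } from "child_process";

// Hypothetical invocation: a real run needs the generator's other flags too;
// --external-port is the only new piece introduced by this commit.
execSync(
  ["node", "dist/index.js", "my-app", "--external-port", "8081"].join(" "),
  { stdio: "inherit" },
);
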
@@ -33,6 +33,7 @@ export async function createApp({
   model,
   communityProjectPath,
   vectorDb,
+  externalPort,
 }: InstallAppArgs): Promise<void> {
   const root = path.resolve(appPath);
@@ -73,6 +74,7 @@
     model,
     communityProjectPath,
     vectorDb,
+    externalPort,
   };

   if (frontend) {
@@ -87,7 +89,7 @@
       ...args,
       root: frontendRoot,
       framework: "nextjs",
-      customApiPath: "http://localhost:8000/api/chat",
+      customApiPath: `http://localhost:${externalPort ?? 8000}/api/chat`,
       backend: false,
     });
     // copy readme for fullstack
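
The behavioral core of the hunk above is the `externalPort ?? 8000` fallback. A minimal standalone sketch (not from the commit) of how the nullish coalescing resolves:

// `??` falls back only on null/undefined, so any caller-supplied port wins.
const apiUrl = (externalPort?: number) =>
  `http://localhost:${externalPort ?? 8000}/api/chat`;

console.log(apiUrl());     // "http://localhost:8000/api/chat"  (flag omitted)
console.log(apiUrl(8081)); // "http://localhost:8081/api/chat"  (--external-port 8081)
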
 /* eslint-disable turbo/no-undeclared-env-vars */
 import { expect, test } from "@playwright/test";
+import { ChildProcess } from "child_process";
 import type {
   TemplateEngine,
   TemplateFramework,
@@ -31,30 +32,83 @@ for (const templateType of templateTypes) {
               ? "--no-frontend" // simple templates don't have frontends
               : "--frontend"
             : "";
-        test(`try create-llama ${templateType} ${templateFramework} ${templateEngine} ${templateUI} ${appType}`, async ({
-          page,
-        }) => {
-          const cwd = await createTestDir();
-          const name = runCreateLlama(
-            cwd,
-            templateType,
-            templateFramework,
-            templateEngine,
-            templateUI,
-            appType,
-          );
-          const port = Math.floor(Math.random() * 10000) + 10000;
-          const cps = await runApp(cwd, name, appType, port);
-          // test frontend
-          if (appType !== "--no-frontend") {
+        test.describe(`try create-llama ${templateType} ${templateFramework} ${templateEngine} ${templateUI} ${appType}`, async () => {
+          let port: number;
+          let externalPort: number;
+          let cwd: string;
+          let name: string;
+          let cps: ChildProcess[];
+          test.beforeAll(async () => {
+            port = Math.floor(Math.random() * 10000) + 10000;
+            externalPort = port + 1;
+            cwd = await createTestDir();
+            name = runCreateLlama(
+              cwd,
+              templateType,
+              templateFramework,
+              templateEngine,
+              templateUI,
+              appType,
+              externalPort,
+            );
+            cps = await runApp(cwd, name, appType, port, externalPort);
+          });
+          test("Frontend should have a title", async ({ page }) => {
+            test.skip(appType === "--no-frontend");
             await page.goto(`http://localhost:${port}`);
             await expect(page.getByText("Built by LlamaIndex")).toBeVisible();
-          }
-          // TODO: test backend using curl (would need OpenAI key)
+          });
+          test("Frontend should be able to submit a message and receive a response", async ({
+            page,
+          }) => {
+            test.skip(appType === "--no-frontend");
+            await page.goto(`http://localhost:${port}`);
+            await page.fill("form input", "hello");
+            await page.click("form button[type=submit]");
+            const response = await page.waitForResponse(
+              (res) => {
+                return res.url().includes("/api/chat") && res.status() === 200;
+              },
+              {
+                timeout: 1000 * 60,
+              },
+            );
+            const text = await response.text();
+            console.log("AI response when submitting message: ", text);
+            expect(response.ok()).toBeTruthy();
+          });
+          test("Backend should response when calling API", async ({
+            request,
+          }) => {
+            test.skip(appType !== "--no-frontend");
+            const response = await request.post(
+              `http://localhost:${port}/api/chat`,
+              {
+                data: {
+                  messages: [
+                    {
+                      role: "user",
+                      content: "Hello",
+                    },
+                  ],
+                },
+              },
+            );
+            const text = await response.text();
+            console.log("AI response when calling API: ", text);
+            expect(response.ok()).toBeTruthy();
+          });
-          // clean processes
-          cps.forEach((cp) => cp.kill());
+          test.afterAll(async () => {
+            cps.map((cp) => cp.kill());
+          });
         });
       }
     }
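
The restructuring above turns one monolithic test into a `test.describe` block: the app is scaffolded and started once in `beforeAll`, three focused tests share that state, and `afterAll` kills the spawned processes. The frontend picks a random port in [10000, 20000) and the backend takes the next port up. A minimal standalone sketch of the same pattern (names hypothetical; note Playwright expects the describe callback itself to be synchronous, which the sketch honors):

import { expect, test } from "@playwright/test";

test.describe("shared-setup pattern", () => {
  let port: number;

  test.beforeAll(async () => {
    // Frontend port in [10000, 20000); a paired backend would get port + 1.
    port = Math.floor(Math.random() * 10000) + 10000;
    // ...scaffold and launch the app once for every test below...
  });

  test("frontend is reachable", async ({ page }) => {
    await page.goto(`http://localhost:${port}`);
    await expect(page).toHaveTitle(/.+/);
  });

  test.afterAll(async () => {
    // ...kill the processes started in beforeAll...
  });
});
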
@@ -12,6 +12,7 @@ export async function runApp(
   name: string,
   appType: AppType,
   port: number,
+  externalPort: number,
 ): Promise<ChildProcess[]> {
   const cps: ChildProcess[] = [];
@@ -22,7 +23,7 @@
     await createProcess(
       "npm run dev",
       path.join(cwd, name, "backend"),
-      port + 1,
+      externalPort,
     ),
   );
   cps.push(
@@ -71,6 +72,7 @@ export function runCreateLlama(
   templateEngine: string,
   templateUI: string,
   appType: AppType,
+  externalPort: number,
 ) {
   const createLlama = path.join(__dirname, "..", "dist", "index.js");
@@ -96,10 +98,12 @@
     "--model",
     MODEL,
     "--open-ai-key",
-    "testKey",
+    process.env.OPENAI_API_KEY || "testKey",
     appType,
     "--eslint",
     "--use-npm",
+    "--external-port",
+    externalPort,
   ].join(" ");
   console.log(`running command '${command}' in ${cwd}`);
   execSync(command, {
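
Passing `process.env.OPENAI_API_KEY || "testKey"` is what lets the chat tests get real completions in CI while keyless local runs still scaffold apps. A small sketch (values assumed) of how the joined command ends up differing:

const apiKey = process.env.OPENAI_API_KEY || "testKey";
const args = ["--open-ai-key", apiKey, "--external-port", String(8081)];
console.log(args.join(" "));
// with OPENAI_API_KEY=sk-...  ->  --open-ai-key sk-... --external-port 8081
// without the variable set    ->  --open-ai-key testKey --external-port 8081
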
@@ -106,10 +106,17 @@ const program = new Commander.Command(packageJson.name)
 `,
   )
   .option(
-    "--model",
+    "--model <model>",
     `
 
   Select OpenAI model to use. E.g. gpt-3.5-turbo.
 `,
   )
+  .option(
+    "--external-port <external>",
+    `
+
+  Select external port.
+`,
+  )
   .allowUnknownOption()
@@ -210,6 +217,7 @@ async function run(): Promise<void> {
     model: program.model,
     communityProjectPath: program.communityProjectPath,
     vectorDb: program.vectorDb,
+    externalPort: program.externalPort,
   });
   conf.set("preferences", preferences);
 }
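
One caveat the diff leaves open: without a parse function, Commander hands `<value>`-style option arguments through as strings, while `InstallTemplateArgs.externalPort` (below) is typed as `number`. A hedged sketch of the coercion a caller might add (hypothetical, not part of the commit):

// Convert Commander's string value to the number the template args expect,
// leaving it undefined when --external-port was not passed.
const externalPort =
  program.externalPort !== undefined ? Number(program.externalPort) : undefined;
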
@@ -22,4 +22,5 @@ export interface InstallTemplateArgs {
   model: string;
   communityProjectPath?: string;
   vectorDb?: TemplateVectorDB;
+  externalPort?: number;
 }