From bf53cd3d3da22c7b3ab2927276aa453eea9d4ad7 Mon Sep 17 00:00:00 2001 From: Marcus Schiesser <mail@marcusschiesser.de> Date: Wed, 6 Dec 2023 10:43:17 +0700 Subject: [PATCH] feat: added e2e (thanks @himself65) --- .github/workflows/e2e.yml | 45 +++++++ .gitignore | 6 + packages/create-llama/create-app.ts | 2 +- packages/create-llama/e2e/basic.spec.ts | 129 +++++++++++++++++++++ packages/create-llama/e2e/tsconfig.json | 11 ++ packages/create-llama/index.ts | 38 +++++- packages/create-llama/package.json | 8 +- packages/create-llama/playwright.config.ts | 21 ++++ packages/create-llama/questions.ts | 22 +++- packages/create-llama/templates/index.ts | 4 +- packages/create-llama/templates/types.ts | 2 +- packages/create-llama/tsconfig.json | 7 +- 12 files changed, 282 insertions(+), 13 deletions(-) create mode 100644 .github/workflows/e2e.yml create mode 100644 packages/create-llama/e2e/basic.spec.ts create mode 100644 packages/create-llama/e2e/tsconfig.json create mode 100644 packages/create-llama/playwright.config.ts diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml new file mode 100644 index 000000000..c1140fe79 --- /dev/null +++ b/.github/workflows/e2e.yml @@ -0,0 +1,45 @@ +name: E2E Tests +on: + push: + branches: [ main ] + pull_request: + paths: + - 'packages/create-llama/**' + - '.github/workflows/e2e.yml' + branches: [ main ] + +jobs: + e2e: + name: create-llama + timeout-minutes: 60 + strategy: + fail-fast: true + matrix: + node-version: [18, 20] + os: [macos-latest, ubuntu-latest, windows-latest] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + - uses: pnpm/action-setup@v2 + - name: Setup Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: 'pnpm' + - name: Install dependencies + run: pnpm install + - name: Install Playwright Browsers + run: pnpm exec playwright install --with-deps + working-directory: ./packages/create-llama + - name: Build create-llama + run: pnpm run build + working-directory: ./packages/create-llama + - name: Run Playwright tests + run: pnpm exec playwright test + working-directory: ./packages/create-llama + - uses: actions/upload-artifact@v3 + if: always() + with: + name: playwright-report + path: ./packages/create-llama/playwright-report/ + retention-days: 30 diff --git a/.gitignore b/.gitignore index 2012eca47..82958076e 100644 --- a/.gitignore +++ b/.gitignore @@ -40,3 +40,9 @@ dist/ # vs code .vscode/launch.json + +.cache +test-results/ +playwright-report/ +blob-report/ +playwright/.cache/ diff --git a/packages/create-llama/create-app.ts b/packages/create-llama/create-app.ts index 06b7b0582..26adac813 100644 --- a/packages/create-llama/create-app.ts +++ b/packages/create-llama/create-app.ts @@ -29,7 +29,7 @@ export async function createApp({ packageManager, eslint, frontend, - openAIKey, + openAiKey: openAIKey, model, communityProjectPath, }: InstallAppArgs): Promise<void> { diff --git a/packages/create-llama/e2e/basic.spec.ts b/packages/create-llama/e2e/basic.spec.ts new file mode 100644 index 000000000..36b8d793f --- /dev/null +++ b/packages/create-llama/e2e/basic.spec.ts @@ -0,0 +1,129 @@ +/* eslint-disable turbo/no-undeclared-env-vars */ +import { expect, test } from "@playwright/test"; +import { exec } from "child_process"; +import { execSync } from "node:child_process"; +import crypto from "node:crypto"; +import { mkdir } from "node:fs/promises"; +import { fileURLToPath } from "node:url"; +import waitPort from "wait-port"; +import type { + 
TemplateEngine, + TemplateFramework, + TemplateType, + TemplateUI, +} from "../templates"; + +let cwd: string; +test.beforeEach(async () => { + cwd = fileURLToPath( + new URL(`.cache/${crypto.randomUUID()}`, import.meta.url), + ); + await mkdir(cwd, { recursive: true }); +}); + +const templateTypes: TemplateType[] = ["streaming", "simple"]; +const templateFrameworks: TemplateFramework[] = ["nextjs", "express"]; +const templateEngines: TemplateEngine[] = ["simple", "context"]; +const templateUIs: TemplateUI[] = ["shadcn", "html"]; + +for (const templateType of templateTypes) { + for (const templateFramework of templateFrameworks) { + for (const templateEngine of templateEngines) { + for (const templateUI of templateUIs) { + const shouldGenerateFrontendEnum = + templateFramework === "express" || templateFramework === "fastapi" + ? ["--frontend", "--no-frontend"] + : [""]; + for (const shouldGenerateFrontend of shouldGenerateFrontendEnum) { + if (templateEngine === "context") { + // we don't test context templates because it needs OPEN_AI_KEY + continue; + } + test(`try create-llama ${templateType} ${templateFramework} ${templateEngine} ${templateUI} ${shouldGenerateFrontend}`, async ({ + page, + }) => { + const createLlama = fileURLToPath( + new URL("../dist/index.js", import.meta.url), + ); + + const name = [ + templateType, + templateFramework, + templateEngine, + templateUI, + shouldGenerateFrontend, + ].join("-"); + const command = [ + "node", + createLlama, + name, + "--template", + templateType, + "--framework", + templateFramework, + "--engine", + templateEngine, + "--ui", + templateUI, + "--open-ai-key", + process.env.OPEN_AI_KEY || "", + shouldGenerateFrontend, + "--eslint", + ].join(" "); + console.log(`running command '${command}' in ${cwd}`); + execSync(command, { + stdio: "inherit", + cwd, + }); + + const port = Math.floor(Math.random() * 10000) + 10000; + + if ( + shouldGenerateFrontend === "--frontend" && + templateFramework === "express" + ) { + execSync("npm install", { + stdio: "inherit", + cwd: `${cwd}/${name}/frontend`, + }); + execSync("npm install", { + stdio: "inherit", + cwd: `${cwd}/${name}/backend`, + }); + } else { + execSync("npm install", { + stdio: "inherit", + cwd: `${cwd}/${name}`, + }); + } + + if (shouldGenerateFrontend === "--no-frontend") { + return; + } + + const cp = exec("npm run dev", { + cwd: + shouldGenerateFrontend === "--frontend" + ? 
`${cwd}/${name}/frontend` + : `${cwd}/${name}`, + env: { + ...process.env, + PORT: `${port}`, + }, + }); + + await waitPort({ + host: "localhost", + port, + timeout: 1000 * 60, + }); + + await page.goto(`http://localhost:${port}`); + await expect(page.getByText("Built by LlamaIndex")).toBeVisible(); + cp.kill(); + }); + } + } + } + } +} diff --git a/packages/create-llama/e2e/tsconfig.json b/packages/create-llama/e2e/tsconfig.json new file mode 100644 index 000000000..a9d7a67e6 --- /dev/null +++ b/packages/create-llama/e2e/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "module": "ESNext", + "target": "ESNext", + "verbatimModuleSyntax": true + }, + "include": [ + "./**/*.ts" + ] +} diff --git a/packages/create-llama/index.ts b/packages/create-llama/index.ts index 72ee5cd8b..d2cc16060 100644 --- a/packages/create-llama/index.ts +++ b/packages/create-llama/index.ts @@ -61,6 +61,42 @@ const program = new Commander.Command(packageJson.name) ` Explicitly tell the CLI to reset any stored preferences +`, + ) + .option( + "--template <template>", + ` + Select a template to bootstrap the application with. +`, + ) + .option( + "--engine <engine>", + ` + Select a chat engine to bootstrap the application with. +`, + ) + .option( + "--framework <framework>", + ` + Select a framework to bootstrap the application with. +`, + ) + .option( + "--open-ai-key <key>", + ` + Provide an OpenAI API key. +`, + ) + .option( + "--ui <ui>", + ` + Select a UI to bootstrap the application with. +`, + ) + .option( + "--frontend", + ` + Whether to generate a frontend for your backend. `, ) .allowUnknownOption() @@ -157,7 +193,7 @@ async function run(): Promise<void> { packageManager, eslint: program.eslint, frontend: program.frontend, - openAIKey: program.openAIKey, + openAiKey: program.openAiKey, model: program.model, communityProjectPath: program.communityProjectPath, }); diff --git a/packages/create-llama/package.json b/packages/create-llama/package.json index f69cb0263..f0079ecbc 100644 --- a/packages/create-llama/package.json +++ b/packages/create-llama/package.json @@ -1,6 +1,7 @@ { "name": "create-llama", "version": "0.0.11", + "type": "module", "keywords": [ "rag", "llamaindex", @@ -23,9 +24,11 @@ "dev": "ncc build ./index.ts -w -o dist/", "build": "ncc build ./index.ts -o ./dist/ --minify --no-cache --no-source-map-register", "lint": "eslint . --ignore-pattern dist", + "e2e": "playwright test", "prepublishOnly": "cd ../../ && turbo run build" }, "devDependencies": { + "@playwright/test": "^1.40.0", "@types/async-retry": "1.4.2", "@types/ci-info": "2.0.0", "@types/cross-spawn": "6.0.0", @@ -47,9 +50,10 @@ "tar": "6.1.15", "terminal-link": "^3.0.0", "update-check": "1.5.4", - "validate-npm-package-name": "3.0.0" + "validate-npm-package-name": "3.0.0", + "wait-port": "^1.1.0" }, "engines": { "node": ">=16.14.0" } -} +} \ No newline at end of file diff --git a/packages/create-llama/playwright.config.ts b/packages/create-llama/playwright.config.ts new file mode 100644 index 000000000..0b4b420b7 --- /dev/null +++ b/packages/create-llama/playwright.config.ts @@ -0,0 +1,21 @@ +/* eslint-disable turbo/no-undeclared-env-vars */ +import { defineConfig, devices } from "@playwright/test"; + +export default defineConfig({ + testDir: "./e2e", + fullyParallel: true, + forbidOnly: !!process.env.CI, + retries: process.env.CI ? 2 : 0, + workers: process.env.CI ? 
1 : undefined, + timeout: 1000 * 60 * 5, + reporter: "html", + use: { + trace: "on-first-retry", + }, + projects: [ + { + name: "chromium", + use: { ...devices["Desktop Chrome"] }, + }, + ], +}); diff --git a/packages/create-llama/questions.ts b/packages/create-llama/questions.ts index 7f0786acb..c3f5e696d 100644 --- a/packages/create-llama/questions.ts +++ b/packages/create-llama/questions.ts @@ -14,7 +14,7 @@ const defaults: QuestionArgs = { ui: "html", eslint: true, frontend: false, - openAIKey: "", + openAiKey: "", model: "gpt-3.5-turbo", communityProjectPath: "", }; @@ -131,8 +131,11 @@ export const askQuestions = async ( } if (program.framework === "express" || program.framework === "fastapi") { + if (process.argv.includes("--no-frontend")) { + program.frontend = false; + } // if a backend-only framework is selected, ask whether we should create a frontend - if (!program.frontend) { + if (program.frontend === undefined) { if (ciInfo.isCI) { program.frontend = getPrefOrDefault("frontend"); } else { @@ -157,6 +160,9 @@ export const askQuestions = async ( preferences.frontend = Boolean(frontend); } } + } else { + // single project if framework is nextjs + program.frontend = false; } if (program.framework === "nextjs" || program.frontend) { @@ -239,7 +245,7 @@ export const askQuestions = async ( } } - if (!program.openAIKey) { + if (!program.openAiKey) { const { key } = await prompts( { type: "text", @@ -248,8 +254,8 @@ export const askQuestions = async ( }, handlers, ); - program.openAIKey = key; - preferences.openAIKey = key; + program.openAiKey = key; + preferences.openAiKey = key; } if ( @@ -274,4 +280,10 @@ export const askQuestions = async ( preferences.eslint = Boolean(eslint); } } + + // TODO: consider using zod to validate the input (doesn't work like this as not every option is required) + // templateUISchema.parse(program.ui); + // templateEngineSchema.parse(program.engine); + // templateFrameworkSchema.parse(program.framework); + // templateTypeSchema.parse(program.template);`` }; diff --git a/packages/create-llama/templates/index.ts b/packages/create-llama/templates/index.ts index cd675448c..76b9f8806 100644 --- a/packages/create-llama/templates/index.ts +++ b/packages/create-llama/templates/index.ts @@ -341,7 +341,7 @@ export const installTemplate = async ( // This is a backend, so we need to copy the test data and create the env file. // Copy the environment file to the target directory. 
- await createEnvLocalFile(props.root, props.openAIKey); + await createEnvLocalFile(props.root, props.openAiKey); // Copy test pdf file await copyTestData( @@ -349,7 +349,7 @@ export const installTemplate = async ( props.framework, props.packageManager, props.engine, - props.openAIKey, + props.openAiKey, ); } }; diff --git a/packages/create-llama/templates/types.ts b/packages/create-llama/templates/types.ts index b6ff2f835..eaab3951e 100644 --- a/packages/create-llama/templates/types.ts +++ b/packages/create-llama/templates/types.ts @@ -16,7 +16,7 @@ export interface InstallTemplateArgs { ui: TemplateUI; eslint: boolean; customApiPath?: string; - openAIKey?: string; + openAiKey?: string; forBackend?: string; model: string; communityProjectPath?: string; diff --git a/packages/create-llama/tsconfig.json b/packages/create-llama/tsconfig.json index e4edad9e1..b8a4782f7 100644 --- a/packages/create-llama/tsconfig.json +++ b/packages/create-llama/tsconfig.json @@ -7,5 +7,10 @@ "esModuleInterop": true, "skipLibCheck": false }, - "exclude": ["templates", "dist"] + "exclude": ["templates", "dist"], + "references": [ + { + "path": "./e2e/tsconfig.json" + } + ] } -- GitLab
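
Usage sketch (assembled from the workflow, package.json, and e2e/basic.spec.ts changes above; pnpm and Node 18+ are assumed, and the project name "my-app" is a placeholder). With this patch applied, the new e2e suite can be run locally with:

    pnpm install                                  # repo root
    cd packages/create-llama
    pnpm run build                                # emits dist/index.js, which the tests invoke
    pnpm exec playwright install --with-deps      # install browsers, as in the workflow
    pnpm run e2e                                  # shorthand for `playwright test` (see package.json)

The non-interactive CLI invocation that e2e/basic.spec.ts exercises looks roughly like:

    node ./dist/index.js my-app --template streaming --framework nextjs \
      --engine simple --ui shadcn --open-ai-key "" --eslint
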