diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
new file mode 100644
index 0000000000000000000000000000000000000000..40c612c395e2b6fa5cfeced3b839349694c6028b
--- /dev/null
+++ b/.github/workflows/e2e.yml
@@ -0,0 +1,45 @@
+name: E2E Tests
+on:
+  push:
+    branches: [main]
+  pull_request:
+    paths:
+      - "packages/create-llama/**"
+      - ".github/workflows/e2e.yml"
+    branches: [main]
+
+jobs:
+  e2e:
+    name: create-llama
+    timeout-minutes: 60
+    strategy:
+      fail-fast: true
+      matrix:
+        node-version: [18, 20]
+        os: [macos-latest] # add windows-latest after timeout issue is fixed (see https://github.com/run-llama/LlamaIndexTS/issues/263)
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: pnpm/action-setup@v2
+      - name: Setup Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v4
+        with:
+          node-version: ${{ matrix.node-version }}
+          cache: "pnpm"
+      - name: Install dependencies
+        run: pnpm install
+      - name: Install Playwright Browsers
+        run: pnpm exec playwright install --with-deps
+        working-directory: ./packages/create-llama
+      - name: Build create-llama
+        run: pnpm run build
+        working-directory: ./packages/create-llama
+      - name: Run Playwright tests
+        run: pnpm exec playwright test
+        working-directory: ./packages/create-llama
+      - uses: actions/upload-artifact@v3
+        if: always()
+        with:
+          name: playwright-report
+          path: ./packages/create-llama/playwright-report/
+          retention-days: 30
diff --git a/.gitignore b/.gitignore
index 2012eca47ffc42825f940fb71786d78f65a47d0e..82958076eedb9bc1ebaed1ef4ecd8f5600611371 100644
--- a/.gitignore
+++ b/.gitignore
@@ -40,3 +40,9 @@ dist/
 
 # vs code
 .vscode/launch.json
+
+.cache
+test-results/
+playwright-report/
+blob-report/
+playwright/.cache/
diff --git a/packages/create-llama/create-app.ts b/packages/create-llama/create-app.ts
index 06b7b0582ff80b9d6a5bc06cfce8c81296049c53..cdaa6dbc4c456927599b7b38fbacc812d84fa04f 100644
--- a/packages/create-llama/create-app.ts
+++ b/packages/create-llama/create-app.ts
@@ -29,7 +29,7 @@ export async function createApp({
   packageManager,
   eslint,
   frontend,
-  openAIKey,
+  openAiKey,
   model,
   communityProjectPath,
 }: InstallAppArgs): Promise<void> {
@@ -68,7 +68,7 @@ export async function createApp({
     packageManager,
     isOnline,
     eslint,
-    openAIKey,
+    openAiKey,
     model,
     communityProjectPath,
   };
diff --git a/packages/create-llama/e2e/basic.spec.ts b/packages/create-llama/e2e/basic.spec.ts
new file mode 100644
index 0000000000000000000000000000000000000000..dbc4411416035203c4e4eb733a03f68067de956a
--- /dev/null
+++ b/packages/create-llama/e2e/basic.spec.ts
@@ -0,0 +1,64 @@
+/* eslint-disable turbo/no-undeclared-env-vars */
+import { expect, test } from "@playwright/test";
+import type {
+  TemplateEngine,
+  TemplateFramework,
+  TemplateType,
+  TemplateUI,
+} from "../templates";
+import { createTestDir, runApp, runCreateLlama, type AppType } from "./utils";
+
+const templateTypes: TemplateType[] = ["streaming", "simple"];
+const templateFrameworks: TemplateFramework[] = ["nextjs", "express"];
+const templateEngines: TemplateEngine[] = ["simple", "context"];
+const templateUIs: TemplateUI[] = ["shadcn", "html"];
+
+for (const templateType of templateTypes) {
+  for (const templateFramework of templateFrameworks) {
+    for (const templateEngine of templateEngines) {
+      for (const templateUI of templateUIs) {
+        if (templateFramework === "nextjs" && templateType === "simple") {
+          // Next.js doesn't support the simple template, so skip that combination
+          continue;
+        }
+        if (templateEngine === "context") {
+          // skip context templates: they need a real OpenAI API key (OPENAI_API_KEY)
+          continue;
+        }
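+        // Express (and FastAPI) backends can be generated with or without a separate frontend; Next.js apps are always a single full-stack project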
+        const appType: AppType =
+          templateFramework === "express" || templateFramework === "fastapi"
+            ? templateType === "simple"
+              ? "--no-frontend" // simple templates don't have frontends
+              : "--frontend"
+            : "";
+        test(`try create-llama ${templateType} ${templateFramework} ${templateEngine} ${templateUI} ${appType}`, async ({
+          page,
+        }) => {
+          const cwd = await createTestDir();
+          const name = runCreateLlama(
+            cwd,
+            templateType,
+            templateFramework,
+            templateEngine,
+            templateUI,
+            appType,
+          );
+
+          const port = Math.floor(Math.random() * 10000) + 10000;
+          const cps = await runApp(cwd, name, appType, port);
+
+          // test frontend
+          if (appType !== "--no-frontend") {
+            await page.goto(`http://localhost:${port}`);
+            await expect(page.getByText("Built by LlamaIndex")).toBeVisible();
+          }
+          // TODO: test backend using curl (would need OpenAI key)
+          // clean processes
+          cps.forEach((cp) => cp.kill());
+        });
+      }
+    }
+  }
+}
diff --git a/packages/create-llama/e2e/utils.ts b/packages/create-llama/e2e/utils.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2716254a02428cc53a3d1c1df282b62a7da6c695
--- /dev/null
+++ b/packages/create-llama/e2e/utils.ts
@@ -0,0 +1,119 @@
+import { ChildProcess, exec, execSync } from "child_process";
+import crypto from "node:crypto";
+import { mkdir } from "node:fs/promises";
+import * as path from "path";
+import waitPort from "wait-port";
+
+export type AppType = "--frontend" | "--no-frontend" | "";
+const MODEL = "gpt-3.5-turbo";
+
+export async function runApp(
+  cwd: string,
+  name: string,
+  appType: AppType,
+  port: number,
+): Promise<ChildProcess[]> {
+  const cps: ChildProcess[] = [];
+
+  try {
+    switch (appType) {
+      case "--frontend":
+        cps.push(
+          await createProcess(
+            "npm run dev",
+            path.join(cwd, name, "backend"),
+            port + 1,
+          ),
+        );
+        cps.push(
+          await createProcess(
+            "npm run dev",
+            path.join(cwd, name, "frontend"),
+            port,
+          ),
+        );
+        break;
+      default:
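+        // "" (Next.js) and "--no-frontend" apps are a single project; run it directly on the requested port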
+        cps.push(
+          await createProcess("npm run dev", path.join(cwd, name), port),
+        );
+        break;
+    }
+  } catch (e) {
+    cps.forEach((cp) => cp.kill());
+    throw e;
+  }
+  return cps;
+}
+
+async function createProcess(command: string, cwd: string, port: number) {
+  const cp = exec(command, {
+    cwd,
+    env: {
+      ...process.env,
+      PORT: `${port}`,
+    },
+  });
+  if (!cp) throw new Error(`Can't start process ${command} in ${cwd}`);
+
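+  // wait (up to one minute) for the dev server to accept connections before handing the process back to the test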
+  await waitPort({
+    host: "localhost",
+    port,
+    timeout: 1000 * 60,
+  });
+  return cp;
+}
+
+export function runCreateLlama(
+  cwd: string,
+  templateType: string,
+  templateFramework: string,
+  templateEngine: string,
+  templateUI: string,
+  appType: AppType,
+) {
+  const createLlama = path.join(__dirname, "..", "dist", "index.js");
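+  // run the built CLI from dist/ (the e2e workflow runs "pnpm run build" before the tests)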
+
+  const name = [
+    templateType,
+    templateFramework,
+    templateEngine,
+    templateUI,
+    appType,
+  ].join("-");
+  const command = [
+    "node",
+    createLlama,
+    name,
+    "--template",
+    templateType,
+    "--framework",
+    templateFramework,
+    "--engine",
+    templateEngine,
+    "--ui",
+    templateUI,
+    "--model",
+    MODEL,
+    "--open-ai-key",
+    "testKey",
+    appType,
+    "--eslint",
+    "--use-npm",
+  ].join(" ");
+  console.log(`running command '${command}' in ${cwd}`);
+  execSync(command, {
+    stdio: "inherit",
+    cwd,
+  });
+  return name;
+}
+export async function createTestDir() {
+  const cwd = path.join(__dirname, ".cache", crypto.randomUUID());
+  await mkdir(cwd, { recursive: true });
+  return cwd;
+}
diff --git a/packages/create-llama/index.ts b/packages/create-llama/index.ts
index 72ee5cd8ba00eef6c6e61c3b326743f47df0d5c3..764be112fbf79046e1f41720f2074d6c3279d8a3 100644
--- a/packages/create-llama/index.ts
+++ b/packages/create-llama/index.ts
@@ -61,6 +61,55 @@ const program = new Commander.Command(packageJson.name)
     `
 
   Explicitly tell the CLI to reset any stored preferences
+`,
+  )
+  .option(
+    "--template <template>",
+    `
+
+  Select a template to bootstrap the application with.
+`,
+  )
+  .option(
+    "--engine <engine>",
+    `
+
+  Select a chat engine to bootstrap the application with.
+`,
+  )
+  .option(
+    "--framework <framework>",
+    `
+
+  Select a framework to bootstrap the application with.
+`,
+  )
+  .option(
+    "--open-ai-key <key>",
+    `
+
+  Provide an OpenAI API key.
+`,
+  )
+  .option(
+    "--ui <ui>",
+    `
+
+  Select a UI to bootstrap the application with.
+`,
+  )
+  .option(
+    "--frontend",
+    `
+
+  Whether to generate a frontend for your backend.
+`,
+  )
+  .option(
+    "--model",
+    `
+
+  Select OpenAI model to use. E.g. gpt-3.5-turbo.
 `,
   )
   .allowUnknownOption()
@@ -113,7 +162,7 @@ async function run(): Promise<void> {
       "\nPlease specify the project directory:\n" +
         `  ${cyan(program.name())} ${green("<project-directory>")}\n` +
         "For example:\n" +
-        `  ${cyan(program.name())} ${green("my-next-app")}\n\n` +
+        `  ${cyan(program.name())} ${green("my-app")}\n\n` +
         `Run ${cyan(`${program.name()} --help`)} to see all options.`,
     );
     process.exit(1);
@@ -157,7 +206,7 @@ async function run(): Promise<void> {
     packageManager,
     eslint: program.eslint,
     frontend: program.frontend,
-    openAIKey: program.openAIKey,
+    openAiKey: program.openAiKey,
     model: program.model,
     communityProjectPath: program.communityProjectPath,
   });
diff --git a/packages/create-llama/package.json b/packages/create-llama/package.json
index f69cb026374920048a3f67f7674795fa05f14722..057f2e0c82fc10d33c5736f455fd46c6f713ffca 100644
--- a/packages/create-llama/package.json
+++ b/packages/create-llama/package.json
@@ -23,9 +23,11 @@
     "dev": "ncc build ./index.ts -w -o dist/",
     "build": "ncc build ./index.ts -o ./dist/ --minify --no-cache --no-source-map-register",
     "lint": "eslint . --ignore-pattern dist",
+    "e2e": "playwright test --reporter=list",
     "prepublishOnly": "cd ../../ && turbo run build"
   },
   "devDependencies": {
+    "@playwright/test": "^1.40.0",
     "@types/async-retry": "1.4.2",
     "@types/ci-info": "2.0.0",
     "@types/cross-spawn": "6.0.0",
@@ -47,9 +49,10 @@
     "tar": "6.1.15",
     "terminal-link": "^3.0.0",
     "update-check": "1.5.4",
-    "validate-npm-package-name": "3.0.0"
+    "validate-npm-package-name": "3.0.0",
+    "wait-port": "^1.1.0"
   },
   "engines": {
     "node": ">=16.14.0"
   }
-}
+}
\ No newline at end of file
diff --git a/packages/create-llama/playwright.config.ts b/packages/create-llama/playwright.config.ts
new file mode 100644
index 0000000000000000000000000000000000000000..0b4b420b7275e2035c1e9b2e60e17ef78dd16e9d
--- /dev/null
+++ b/packages/create-llama/playwright.config.ts
@@ -0,0 +1,22 @@
+/* eslint-disable turbo/no-undeclared-env-vars */
+import { defineConfig, devices } from "@playwright/test";
+
+export default defineConfig({
+  testDir: "./e2e",
+  fullyParallel: true,
+  forbidOnly: !!process.env.CI,
+  retries: process.env.CI ? 2 : 0,
+  workers: process.env.CI ? 1 : undefined,
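+  // each test scaffolds and boots a fresh app, so allow up to five minutes per test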
+  timeout: 1000 * 60 * 5,
+  reporter: "html",
+  use: {
+    trace: "on-first-retry",
+  },
+  projects: [
+    {
+      name: "chromium",
+      use: { ...devices["Desktop Chrome"] },
+    },
+  ],
+});
diff --git a/packages/create-llama/questions.ts b/packages/create-llama/questions.ts
index 7f0786acb0197988b8423025c09fe796c98f6085..c3f5e696d4bab00ca8b94882f10df32a78cad37c 100644
--- a/packages/create-llama/questions.ts
+++ b/packages/create-llama/questions.ts
@@ -14,7 +14,7 @@ const defaults: QuestionArgs = {
   ui: "html",
   eslint: true,
   frontend: false,
-  openAIKey: "",
+  openAiKey: "",
   model: "gpt-3.5-turbo",
   communityProjectPath: "",
 };
@@ -131,8 +131,11 @@ export const askQuestions = async (
   }
 
   if (program.framework === "express" || program.framework === "fastapi") {
+    if (process.argv.includes("--no-frontend")) {
+      program.frontend = false;
+    }
     // if a backend-only framework is selected, ask whether we should create a frontend
-    if (!program.frontend) {
+    if (program.frontend === undefined) {
       if (ciInfo.isCI) {
         program.frontend = getPrefOrDefault("frontend");
       } else {
@@ -157,6 +160,9 @@ export const askQuestions = async (
         preferences.frontend = Boolean(frontend);
       }
     }
+  } else {
+    // single project if framework is nextjs
+    program.frontend = false;
   }
 
   if (program.framework === "nextjs" || program.frontend) {
@@ -239,7 +245,7 @@ export const askQuestions = async (
     }
   }
 
-  if (!program.openAIKey) {
+  if (!program.openAiKey) {
     const { key } = await prompts(
       {
         type: "text",
@@ -248,8 +254,8 @@ export const askQuestions = async (
       },
       handlers,
     );
-    program.openAIKey = key;
-    preferences.openAIKey = key;
+    program.openAiKey = key;
+    preferences.openAiKey = key;
   }
 
   if (
@@ -274,4 +280,10 @@ export const askQuestions = async (
       preferences.eslint = Boolean(eslint);
     }
   }
+
+  // TODO: consider using zod to validate the input (doesn't work as-is, since not every option is required)
+  // templateUISchema.parse(program.ui);
+  // templateEngineSchema.parse(program.engine);
+  // templateFrameworkSchema.parse(program.framework);
+  // templateTypeSchema.parse(program.template);
 };
diff --git a/packages/create-llama/templates/index.ts b/packages/create-llama/templates/index.ts
index cd675448cbc5b93847d6ea28a449b90826fa8c56..79746b62d67c79167f629d45972bc6072f9554ab 100644
--- a/packages/create-llama/templates/index.ts
+++ b/packages/create-llama/templates/index.ts
@@ -16,12 +16,12 @@ import {
   TemplateFramework,
 } from "./types";
 
-const createEnvLocalFile = async (root: string, openAIKey?: string) => {
-  if (openAIKey) {
+const createEnvLocalFile = async (root: string, openAiKey?: string) => {
+  if (openAiKey) {
     const envFileName = ".env";
     await fs.writeFile(
       path.join(root, envFileName),
-      `OPENAI_API_KEY=${openAIKey}\n`,
+      `OPENAI_API_KEY=${openAiKey}\n`,
     );
     console.log(`Created '${envFileName}' file containing OPENAI_API_KEY`);
   }
@@ -32,7 +32,7 @@ const copyTestData = async (
   framework: TemplateFramework,
   packageManager?: PackageManager,
   engine?: TemplateEngine,
-  openAIKey?: string,
+  openAiKey?: string,
 ) => {
   if (framework === "nextjs") {
     // XXX: This is a hack to make the build for nextjs work with pdf-parse
@@ -53,7 +53,7 @@ const copyTestData = async (
   }
 
   if (packageManager && engine === "context") {
-    if (openAIKey || process.env["OPENAI_API_KEY"]) {
+    if (openAiKey || process.env["OPENAI_API_KEY"]) {
       console.log(
         `\nRunning ${cyan(
           `${packageManager} run generate`,
@@ -341,7 +341,7 @@ export const installTemplate = async (
     // This is a backend, so we need to copy the test data and create the env file.
 
     // Copy the environment file to the target directory.
-    await createEnvLocalFile(props.root, props.openAIKey);
+    await createEnvLocalFile(props.root, props.openAiKey);
 
     // Copy test pdf file
     await copyTestData(
@@ -349,7 +349,7 @@ export const installTemplate = async (
       props.framework,
       props.packageManager,
       props.engine,
-      props.openAIKey,
+      props.openAiKey,
     );
   }
 };
diff --git a/packages/create-llama/templates/types.ts b/packages/create-llama/templates/types.ts
index b6ff2f835fee0efae287077e2a13fdae539fe08f..eaab3951eac4637c762422195e049879a256ccc2 100644
--- a/packages/create-llama/templates/types.ts
+++ b/packages/create-llama/templates/types.ts
@@ -16,7 +16,7 @@ export interface InstallTemplateArgs {
   ui: TemplateUI;
   eslint: boolean;
   customApiPath?: string;
-  openAIKey?: string;
+  openAiKey?: string;
   forBackend?: string;
   model: string;
   communityProjectPath?: string;
diff --git a/packages/create-llama/templates/types/simple/express/index.ts b/packages/create-llama/templates/types/simple/express/index.ts
index daf5d8b6e82599243a1becc1d8e85c0de769e9dd..830c549f51dc4e0d540673a9abd1913ea1b44444 100644
--- a/packages/create-llama/templates/types/simple/express/index.ts
+++ b/packages/create-llama/templates/types/simple/express/index.ts
@@ -1,10 +1,11 @@
+/* eslint-disable turbo/no-undeclared-env-vars */
 import cors from "cors";
 import "dotenv/config";
 import express, { Express, Request, Response } from "express";
 import chatRouter from "./src/routes/chat.route";
 
 const app: Express = express();
-const port = 8000;
+const port = parseInt(process.env.PORT || "8000", 10);
 
 const env = process.env["NODE_ENV"];
 const isDevelopment = !env || env === "development";
diff --git a/packages/create-llama/templates/types/streaming/express/index.ts b/packages/create-llama/templates/types/streaming/express/index.ts
index daf5d8b6e82599243a1becc1d8e85c0de769e9dd..830c549f51dc4e0d540673a9abd1913ea1b44444 100644
--- a/packages/create-llama/templates/types/streaming/express/index.ts
+++ b/packages/create-llama/templates/types/streaming/express/index.ts
@@ -1,10 +1,11 @@
+/* eslint-disable turbo/no-undeclared-env-vars */
 import cors from "cors";
 import "dotenv/config";
 import express, { Express, Request, Response } from "express";
 import chatRouter from "./src/routes/chat.route";
 
 const app: Express = express();
-const port = 8000;
+const port = parseInt(process.env.PORT || "8000", 10);
 
 const env = process.env["NODE_ENV"];
 const isDevelopment = !env || env === "development";
diff --git a/packages/create-llama/tsconfig.json b/packages/create-llama/tsconfig.json
index e4edad9e127c7de102ca7bdd6b243168a83bbc20..a653e907d965449ec6e9f82778b7a2e722f9bc02 100644
--- a/packages/create-llama/tsconfig.json
+++ b/packages/create-llama/tsconfig.json
@@ -7,5 +7,8 @@
     "esModuleInterop": true,
     "skipLibCheck": false
   },
-  "exclude": ["templates", "dist"]
-}
+  "exclude": [
+    "templates",
+    "dist"
+  ]
+}
\ No newline at end of file
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 079de736ae46d5bb6af79677bb4773e12547620a..33f5341147c4bc71214789168b9adbd9de3126b5 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -240,6 +240,9 @@ importers:
 
   packages/create-llama:
     devDependencies:
+      '@playwright/test':
+        specifier: ^1.40.0
+        version: 1.40.0
       '@types/async-retry':
         specifier: 1.4.2
         version: 1.4.2
@@ -306,6 +309,9 @@ importers:
       validate-npm-package-name:
         specifier: 3.0.0
         version: 3.0.0
+      wait-port:
+        specifier: ^1.1.0
+        version: 1.1.0
 
   packages/eslint-config-custom:
     dependencies:
@@ -3903,6 +3909,14 @@ packages:
       tslib: 2.6.1
     dev: false
 
+  /@playwright/test@1.40.0:
+    resolution: {integrity: sha512-PdW+kn4eV99iP5gxWNSDQCbhMaDVej+RXL5xr6t04nbKLCBwYtA046t7ofoczHOm8u6c+45hpDKQVZqtqwkeQg==}
+    engines: {node: '>=16'}
+    hasBin: true
+    dependencies:
+      playwright: 1.40.0
+    dev: true
+
   /@polka/url@1.0.0-next.23:
     resolution: {integrity: sha512-C16M+IYz0rgRhWZdCmK+h58JMv8vijAA61gmz2rspCSwKwzBebpdcsiUmwrtJRdphuY30i6BSLEOP8ppbNLyLg==}
     dev: false
@@ -6326,6 +6340,11 @@ packages:
     engines: {node: '>= 12'}
     dev: false
 
+  /commander@9.5.0:
+    resolution: {integrity: sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==}
+    engines: {node: ^12.20.0 || >=14}
+    dev: true
+
   /commondir@1.0.1:
     resolution: {integrity: sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==}
     dev: false
@@ -8559,6 +8578,14 @@ packages:
   /fs.realpath@1.0.0:
     resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==}
 
+  /fsevents@2.3.2:
+    resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==}
+    engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
+    os: [darwin]
+    requiresBuild: true
+    dev: true
+    optional: true
+
   /fsevents@2.3.3:
     resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
     engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
@@ -12147,6 +12174,22 @@ packages:
     resolution: {integrity: sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==}
     dev: false
 
+  /playwright-core@1.40.0:
+    resolution: {integrity: sha512-fvKewVJpGeca8t0ipM56jkVSU6Eo0RmFvQ/MaCQNDYm+sdvKkMBBWTE1FdeMqIdumRaXXjZChWHvIzCGM/tA/Q==}
+    engines: {node: '>=16'}
+    hasBin: true
+    dev: true
+
+  /playwright@1.40.0:
+    resolution: {integrity: sha512-gyHAgQjiDf1m34Xpwzaqb76KgfzYrhK7iih+2IzcOCoZWr/8ZqmdBw+t0RU85ZmfJMgtgAiNtBQ/KS2325INXw==}
+    engines: {node: '>=16'}
+    hasBin: true
+    dependencies:
+      playwright-core: 1.40.0
+    optionalDependencies:
+      fsevents: 2.3.2
+    dev: true
+
   /portkey-ai@0.1.16:
     resolution: {integrity: sha512-EY4FRp6PZSD75Q1o1qc08DfPNTG9FnkUPN3Z1/lEvaq9iFpSO5UekcagUZaKSVhao311qjBjns+kF0rS9ht7iA==}
     dependencies:
@@ -15655,6 +15698,18 @@ packages:
       - debug
     dev: false
 
+  /wait-port@1.1.0:
+    resolution: {integrity: sha512-3e04qkoN3LxTMLakdqeWth8nih8usyg+sf1Bgdf9wwUkp05iuK1eSY/QpLvscT/+F/gA89+LpUmmgBtesbqI2Q==}
+    engines: {node: '>=10'}
+    hasBin: true
+    dependencies:
+      chalk: 4.1.2
+      commander: 9.5.0
+      debug: 4.3.4
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
   /walker@1.0.8:
     resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==}
     dependencies: