diff --git a/.changeset/chilled-snakes-act.md b/.changeset/chilled-snakes-act.md
new file mode 100644
index 0000000000000000000000000000000000000000..8b36a2c0ec94fb4d370a387f29baa86b13307f51
--- /dev/null
+++ b/.changeset/chilled-snakes-act.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+feat: add e2e test coverage for the LlamaParse option
diff --git a/e2e/extractor_template.spec.ts b/e2e/extractor_template.spec.ts
index 4644cd799afc0674a56bef575072173bac782088..0818e7c7b3f41975b603fd9db55126a95c550ea1 100644
--- a/e2e/extractor_template.spec.ts
+++ b/e2e/extractor_template.spec.ts
@@ -32,16 +32,16 @@ if (
       cwd = await createTestDir();
       frontendPort = Math.floor(Math.random() * 10000) + 10000;
       backendPort = frontendPort + 1;
-      const result = await runCreateLlama(
+      const result = await runCreateLlama({
         cwd,
-        "extractor",
-        "fastapi",
-        "--example-file",
-        "none",
-        frontendPort,
-        backendPort,
-        "runApp",
-      );
+        templateType: "extractor",
+        templateFramework: "fastapi",
+        dataSource: "--example-file",
+        vectorDb: "none",
+        port: frontendPort,
+        externalPort: backendPort,
+        postInstallAction: "runApp",
+      });
       name = result.projectName;
       appProcess = result.appProcess;
     });
diff --git a/e2e/multiagent_template.spec.ts b/e2e/multiagent_template.spec.ts
index c69e34c2a36d0de72c03b994830b9b7cd16de97e..619b8cd15e1f71ded3ffe4fec678904bcbd97616 100644
--- a/e2e/multiagent_template.spec.ts
+++ b/e2e/multiagent_template.spec.ts
@@ -36,18 +36,18 @@ test.describe(`Test multiagent template ${templateFramework} ${dataSource} ${tem
     port = Math.floor(Math.random() * 10000) + 10000;
     externalPort = port + 1;
     cwd = await createTestDir();
-    const result = await runCreateLlama(
+    const result = await runCreateLlama({
       cwd,
-      "multiagent",
+      templateType: "multiagent",
       templateFramework,
       dataSource,
       vectorDb,
       port,
       externalPort,
-      templatePostInstallAction,
+      postInstallAction: templatePostInstallAction,
       templateUI,
       appType,
-    );
+    });
     name = result.projectName;
     appProcess = result.appProcess;
   });
diff --git a/e2e/resolve_python_dependencies.spec.ts b/e2e/resolve_python_dependencies.spec.ts
index d48e82d5c308912b8d9b47d95d5caca1c78184cd..b678a107eebc1aa7823808436afa3a414cc41a74 100644
--- a/e2e/resolve_python_dependencies.spec.ts
+++ b/e2e/resolve_python_dependencies.spec.ts
@@ -53,21 +53,21 @@ if (
           test(`options: ${optionDescription}`, async () => {
             const cwd = await createTestDir();
 
-            const result = await runCreateLlama(
+            const result = await runCreateLlama({
               cwd,
-              "streaming",
-              "fastapi",
+              templateType: "streaming",
+              templateFramework: "fastapi",
               dataSource,
               vectorDb,
-              3000, // port
-              8000, // externalPort
-              "none", // postInstallAction
-              undefined, // ui
-              "--no-frontend", // appType
-              undefined, // llamaCloudProjectName
-              undefined, // llamaCloudIndexName
-              tool,
-            );
+              port: 3000,
+              externalPort: 8000,
+              postInstallAction: "none",
+              templateUI: undefined,
+              appType: "--no-frontend",
+              llamaCloudProjectName: undefined,
+              llamaCloudIndexName: undefined,
+              tools: tool,
+            });
             const name = result.projectName;
 
             // Check if the app folder exists
diff --git a/e2e/resolve_ts_dependencies.spec.ts b/e2e/resolve_ts_dependencies.spec.ts
index 6b67666f794108da1dcfe0e39738b32460b82360..7e77530341745f04ee98b96e87b734bd1b4c613d 100644
--- a/e2e/resolve_ts_dependencies.spec.ts
+++ b/e2e/resolve_ts_dependencies.spec.ts
@@ -19,6 +19,7 @@ if (
   templateFramework == "nextjs" ||
   templateFramework == "express" // test is only relevant for TS projects
 ) {
+  const llamaParseOptions = [true, false];
   // vectorDBs combinations to test
   const vectorDbs: TemplateVectorDB[] = [
     "mongo",
@@ -33,65 +34,69 @@ if (
   ];
 
   test.describe("Test resolve TS dependencies", () => {
-    for (const vectorDb of vectorDbs) {
-      const optionDescription = `vectorDb: ${vectorDb}, dataSource: ${dataSource}`;
+    for (const llamaParseOpt of llamaParseOptions) {
+      for (const vectorDb of vectorDbs) {
+        const optionDescription = `vectorDb: ${vectorDb}, dataSource: ${dataSource}, llamaParse: ${llamaParseOpt}`;
 
-      test(`options: ${optionDescription}`, async () => {
-        const cwd = await createTestDir();
+        test(`options: ${optionDescription}`, async () => {
+          const cwd = await createTestDir();
 
-        const result = await runCreateLlama(
-          cwd,
-          "streaming",
-          templateFramework,
-          dataSource,
-          vectorDb,
-          3000, // port
-          8000, // externalPort
-          "none", // postInstallAction
-          undefined, // ui
-          templateFramework === "nextjs" ? "" : "--no-frontend", // appType
-          undefined, // llamaCloudProjectName
-          undefined, // llamaCloudIndexName
-        );
-        const name = result.projectName;
+          const result = await runCreateLlama({
+            cwd: cwd,
+            templateType: "streaming",
+            templateFramework: templateFramework,
+            dataSource: dataSource,
+            vectorDb: vectorDb,
+            port: 3000,
+            externalPort: 8000,
+            postInstallAction: "none",
+            templateUI: undefined,
+            appType: templateFramework === "nextjs" ? "" : "--no-frontend",
+            llamaCloudProjectName: undefined,
+            llamaCloudIndexName: undefined,
+            tools: undefined,
+            useLlamaParse: llamaParseOpt,
+          });
+          const name = result.projectName;
 
-        // Check if the app folder exists
-        const appDir = path.join(cwd, name);
-        const dirExists = fs.existsSync(appDir);
-        expect(dirExists).toBeTruthy();
+          // Check if the app folder exists
+          const appDir = path.join(cwd, name);
+          const dirExists = fs.existsSync(appDir);
+          expect(dirExists).toBeTruthy();
 
-        // Install dependencies using pnpm
-        try {
-          const { stderr: installStderr } = await execAsync(
-            "pnpm install --prefer-offline",
-            {
-              cwd: appDir,
-            },
-          );
-          expect(installStderr).toBeFalsy();
-        } catch (error) {
-          console.error("Error installing dependencies:", error);
-          throw error;
-        }
+          // Install dependencies using pnpm
+          try {
+            const { stderr: installStderr } = await execAsync(
+              "pnpm install --prefer-offline",
+              {
+                cwd: appDir,
+              },
+            );
+            expect(installStderr).toBeFalsy();
+          } catch (error) {
+            console.error("Error installing dependencies:", error);
+            throw error;
+          }
 
-        // Run tsc type check and capture the output
-        try {
-          const { stdout, stderr } = await execAsync(
-            "pnpm exec tsc -b --diagnostics",
-            {
-              cwd: appDir,
-            },
-          );
-          // Check if there's any error output
-          expect(stderr).toBeFalsy();
+          // Run tsc type check and capture the output
+          try {
+            const { stdout, stderr } = await execAsync(
+              "pnpm exec tsc -b --diagnostics",
+              {
+                cwd: appDir,
+              },
+            );
+            // Check if there's any error output
+            expect(stderr).toBeFalsy();
 
-          // Log the stdout for debugging purposes
-          console.log("TypeScript type-check output:", stdout);
-        } catch (error) {
-          console.error("Error running tsc:", error);
-          throw error;
-        }
-      });
+            // Log the stdout for debugging purposes
+            console.log("TypeScript type-check output:", stdout);
+          } catch (error) {
+            console.error("Error running tsc:", error);
+            throw error;
+          }
+        });
+      }
     }
   });
 }
diff --git a/e2e/streaming_template.spec.ts b/e2e/streaming_template.spec.ts
index 73c5b146560a346c8e10cc354c8c5670230378a2..53eb2318f97458bee335933a8129dac1e6500be7 100644
--- a/e2e/streaming_template.spec.ts
+++ b/e2e/streaming_template.spec.ts
@@ -39,20 +39,20 @@ test.describe(`Test streaming template ${templateFramework} ${dataSource} ${temp
     port = Math.floor(Math.random() * 10000) + 10000;
     externalPort = port + 1;
     cwd = await createTestDir();
-    const result = await runCreateLlama(
+    const result = await runCreateLlama({
       cwd,
-      "streaming",
+      templateType: "streaming",
       templateFramework,
       dataSource,
       vectorDb,
       port,
       externalPort,
-      templatePostInstallAction,
+      postInstallAction: templatePostInstallAction,
       templateUI,
       appType,
       llamaCloudProjectName,
       llamaCloudIndexName,
-    );
+    });
     name = result.projectName;
     appProcess = result.appProcess;
   });
diff --git a/e2e/utils.ts b/e2e/utils.ts
index 7c988617e87807ba5daaa6b5bd395f6458b8de6d..361ad7c6c5fe7b2773a9e293314f96f9fd30b2c2 100644
--- a/e2e/utils.ts
+++ b/e2e/utils.ts
@@ -18,22 +18,39 @@ export type CreateLlamaResult = {
   appProcess: ChildProcess;
 };
 
-// eslint-disable-next-line max-params
-export async function runCreateLlama(
-  cwd: string,
-  templateType: TemplateType,
-  templateFramework: TemplateFramework,
-  dataSource: string,
-  vectorDb: TemplateVectorDB,
-  port: number,
-  externalPort: number,
-  postInstallAction: TemplatePostInstallAction,
-  templateUI?: TemplateUI,
-  appType?: AppType,
-  llamaCloudProjectName?: string,
-  llamaCloudIndexName?: string,
-  tools?: string,
-): Promise<CreateLlamaResult> {
+export type RunCreateLlamaOptions = {
+  cwd: string;
+  templateType: TemplateType;
+  templateFramework: TemplateFramework;
+  dataSource: string;
+  vectorDb: TemplateVectorDB;
+  port: number;
+  externalPort: number;
+  postInstallAction: TemplatePostInstallAction;
+  templateUI?: TemplateUI;
+  appType?: AppType;
+  llamaCloudProjectName?: string;
+  llamaCloudIndexName?: string;
+  tools?: string;
+  useLlamaParse?: boolean;
+};
+
+export async function runCreateLlama({
+  cwd,
+  templateType,
+  templateFramework,
+  dataSource,
+  vectorDb,
+  port,
+  externalPort,
+  postInstallAction,
+  templateUI,
+  appType,
+  llamaCloudProjectName,
+  llamaCloudIndexName,
+  tools,
+  useLlamaParse,
+}: RunCreateLlamaOptions): Promise<CreateLlamaResult> {
   if (!process.env.OPENAI_API_KEY || !process.env.LLAMA_CLOUD_API_KEY) {
     throw new Error(
       "Setting the OPENAI_API_KEY and LLAMA_CLOUD_API_KEY is mandatory to run tests",
@@ -80,7 +97,6 @@ export async function runCreateLlama(
     postInstallAction,
     "--tools",
     tools ?? "none",
-    "--no-llama-parse",
     "--observability",
     "none",
     "--llama-cloud-key",
@@ -93,6 +109,9 @@ export async function runCreateLlama(
   if (appType) {
     commandArgs.push(appType);
   }
+  if (!useLlamaParse) {
+    commandArgs.push("--no-llama-parse");
+  }
 
   const command = commandArgs.join(" ");
   console.log(`running command '${command}' in ${cwd}`);