diff --git a/helpers/providers/anthropic.ts b/helpers/providers/anthropic.ts
index 1239a0c4954975a077b136949e8f4c4371202fc2..db7e4f06554ae4801a4dadc4e9f60091941de566 100644
--- a/helpers/providers/anthropic.ts
+++ b/helpers/providers/anthropic.ts
@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 const MODELS = [
   "claude-3-opus",
diff --git a/helpers/providers/azure.ts b/helpers/providers/azure.ts
index e450715f543d35622e111c32ae9829cc69978ec0..28250ecee605bf0480d4e5d39b0a949cd2ab5028 100644
--- a/helpers/providers/azure.ts
+++ b/helpers/providers/azure.ts
@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
-import { questionHandlers } from "../../questions";
+import { questionHandlers } from "../../questions/utils";
 
 const ALL_AZURE_OPENAI_CHAT_MODELS: Record<string, { openAIModel: string }> = {
   "gpt-35-turbo": { openAIModel: "gpt-3.5-turbo" },
diff --git a/helpers/providers/gemini.ts b/helpers/providers/gemini.ts
index b0f6733f69d296b767af3f496a7686fb15e8a4de..50096bded9c6cc8c729fbaf9ba6825abc26e21ad 100644
--- a/helpers/providers/gemini.ts
+++ b/helpers/providers/gemini.ts
@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 const MODELS = ["gemini-1.5-pro-latest", "gemini-pro", "gemini-pro-vision"];
 type ModelData = {
diff --git a/helpers/providers/groq.ts b/helpers/providers/groq.ts
index 075475e3998d7199a6f98bb4d874313041d28998..33394475fb350eba8dd2e372e60713b5550e36c5 100644
--- a/helpers/providers/groq.ts
+++ b/helpers/providers/groq.ts
@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 import got from "got";
 import ora from "ora";
diff --git a/helpers/providers/index.ts b/helpers/providers/index.ts
index c19efaa4eab5c3069feeb87f0346cfcca422c8cb..06977f6fc048c1ba177200eee919008a1855ae1b 100644
--- a/helpers/providers/index.ts
+++ b/helpers/providers/index.ts
@@ -1,6 +1,5 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
-import { questionHandlers } from "../../questions";
+import { questionHandlers } from "../../questions/utils";
 import { ModelConfig, ModelProvider, TemplateFramework } from "../types";
 import { askAnthropicQuestions } from "./anthropic";
 import { askAzureQuestions } from "./azure";
@@ -27,7 +26,7 @@ export async function askModelConfig({
   framework,
 }: ModelConfigQuestionsParams): Promise<ModelConfig> {
   let modelProvider: ModelProvider = DEFAULT_MODEL_PROVIDER;
-  if (askModels && !ciInfo.isCI) {
+  if (askModels) {
     let choices = [
       { title: "OpenAI", value: "openai" },
       { title: "Groq", value: "groq" },
diff --git a/helpers/providers/llmhub.ts b/helpers/providers/llmhub.ts
index b15633251b1c174f2ae688406d8afbeef79020a7..0e4a610ee30515b40b666d474459b39ef03afc0a 100644
--- a/helpers/providers/llmhub.ts
+++ b/helpers/providers/llmhub.ts
@@ -4,7 +4,7 @@ import ora from "ora";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers } from "../../questions";
+import { questionHandlers } from "../../questions/utils";
 
 export const TSYSTEMS_LLMHUB_API_URL =
   "https://llm-server.llmhub.t-systems.net/v2";
diff --git a/helpers/providers/mistral.ts b/helpers/providers/mistral.ts
index b892b748a9e054bef6bf3d7885b024fd4aa4af2b..c040b412db72dd607050bf00d52584e205f23f6a 100644
--- a/helpers/providers/mistral.ts
+++ b/helpers/providers/mistral.ts
@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 const MODELS = ["mistral-tiny", "mistral-small", "mistral-medium"];
 type ModelData = {
diff --git a/helpers/providers/ollama.ts b/helpers/providers/ollama.ts
index e70b25f06336bb45911e4878d1328cd00dfc08eb..cdcbcce64257696056854866102ccf5ab867f07a 100644
--- a/helpers/providers/ollama.ts
+++ b/helpers/providers/ollama.ts
@@ -3,7 +3,7 @@ import ollama, { type ModelResponse } from "ollama";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 type ModelData = {
   dimensions: number;
diff --git a/helpers/providers/openai.ts b/helpers/providers/openai.ts
index 2d3b213e0136952b452d15e9f2bbb7b3bfa335e4..6243f5b56ea71efba0942bac9a2344b6c7a242e8 100644
--- a/helpers/providers/openai.ts
+++ b/helpers/providers/openai.ts
@@ -4,7 +4,7 @@ import ora from "ora";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
-import { questionHandlers } from "../../questions";
+import { questionHandlers } from "../../questions/utils";
 
 const OPENAI_API_URL = "https://api.openai.com/v1";
 
diff --git a/helpers/python.ts b/helpers/python.ts
index a1be02d024909aec11d6d21602ec591fa1e1bb36..58908276bdc6ebed5355606234a1931c94364a4f 100644
--- a/helpers/python.ts
+++ b/helpers/python.ts
@@ -93,6 +93,12 @@ const getAdditionalDependencies = (
       });
       break;
     }
+    case "llamacloud":
+      dependencies.push({
+        name: "llama-index-indices-managed-llama-cloud",
+        version: "^0.3.1",
+      });
+      break;
   }
 
   // Add data source dependencies
@@ -127,12 +133,6 @@ const getAdditionalDependencies = (
             version: "^2.9.9",
           });
           break;
-        case "llamacloud":
-          dependencies.push({
-            name: "llama-index-indices-managed-llama-cloud",
-            version: "^0.3.1",
-          });
-          break;
       }
     }
   }
diff --git a/helpers/types.ts b/helpers/types.ts
index e8e5265fa392d4709ed52b607f076e8f50bef2c7..89dd96644349b5fff06494babe5699e98d2c55fd 100644
--- a/helpers/types.ts
+++ b/helpers/types.ts
@@ -46,7 +46,7 @@ export type TemplateDataSource = {
   type: TemplateDataSourceType;
   config: TemplateDataSourceConfig;
 };
-export type TemplateDataSourceType = "file" | "web" | "db" | "llamacloud";
+export type TemplateDataSourceType = "file" | "web" | "db";
 export type TemplateObservability = "none" | "traceloop" | "llamatrace";
 // Config for both file and folder
 export type FileSourceConfig = {
diff --git a/index.ts b/index.ts
index e187aedfaa079aa7afb4d69293fa5a0a983243d4..6ee0edeb13fa3403c4f7eaf369f83e322bca6146 100644
--- a/index.ts
+++ b/index.ts
@@ -1,7 +1,6 @@
 /* eslint-disable import/no-extraneous-dependencies */
 import { execSync } from "child_process";
-import Commander from "commander";
-import Conf from "conf";
+import { Command } from "commander";
 import fs from "fs";
 import path from "path";
 import { bold, cyan, green, red, yellow } from "picocolors";
@@ -17,8 +16,9 @@ import { runApp } from "./helpers/run-app";
 import { getTools } from "./helpers/tools";
 import { validateNpmName } from "./helpers/validate-pkg";
 import packageJson from "./package.json";
-import { QuestionArgs, askQuestions, onPromptState } from "./questions";
-
+import { askQuestions } from "./questions/index";
+import { QuestionArgs } from "./questions/types";
+import { onPromptState } from "./questions/utils";
 // Run the initialization function
 initializeGlobalAgent();
 
@@ -29,12 +29,14 @@ const handleSigTerm = () => process.exit(0);
 process.on("SIGINT", handleSigTerm);
 process.on("SIGTERM", handleSigTerm);
 
-const program = new Commander.Command(packageJson.name)
+const program = new Command(packageJson.name)
   .version(packageJson.version)
-  .arguments("<project-directory>")
-  .usage(`${green("<project-directory>")} [options]`)
+  .arguments("[project-directory]")
+  .usage(`${green("[project-directory]")} [options]`)
   .action((name) => {
-    projectPath = name;
+    if (name) {
+      projectPath = name;
+    }
   })
   .option(
     "--use-npm",
@@ -55,13 +57,6 @@ const program = new Commander.Command(packageJson.name)
     `
 
   Explicitly tell the CLI to bootstrap the application using Yarn
-`,
-  )
-  .option(
-    "--reset-preferences",
-    `
-
-  Explicitly tell the CLI to reset any stored preferences
 `,
   )
   .option(
@@ -124,7 +119,14 @@ const program = new Commander.Command(packageJson.name)
     "--frontend",
     `
 
-  Whether to generate a frontend for your backend.
+  Generate a frontend for your backend.
+`,
+  )
+  .option(
+    "--no-frontend",
+    `
+
+  Do not generate a frontend for your backend.
 `,
   )
   .option(
@@ -161,6 +163,13 @@ const program = new Commander.Command(packageJson.name)
 
   Specify the tools you want to use by providing a comma-separated list. For example, 'wikipedia.WikipediaToolSpec,google.GoogleSearchToolSpec'. Use 'none' to not using any tools.
 `,
+    (tools, _) => {
+      if (tools === "none") {
+        return [];
+      } else {
+        return getTools(tools.split(","));
+      }
+    },
   )
   .option(
     "--use-llama-parse",
@@ -189,86 +198,66 @@ const program = new Commander.Command(packageJson.name)
 
   Allow interactive selection of LLM and embedding models of different model providers.
 `,
+    false,
   )
   .option(
-    "--ask-examples",
+    "--pro",
     `
 
-  Allow interactive selection of community templates and LlamaPacks.
+  Allow interactive selection of all features.
 `,
+    false,
   )
   .allowUnknownOption()
   .parse(process.argv);
-if (process.argv.includes("--no-frontend")) {
-  program.frontend = false;
-}
-if (process.argv.includes("--tools")) {
-  if (program.tools === "none") {
-    program.tools = [];
-  } else {
-    program.tools = getTools(program.tools.split(","));
-  }
-}
+
+const options = program.opts();
+
 if (
   process.argv.includes("--no-llama-parse") ||
-  program.template === "extractor"
+  options.template === "extractor"
 ) {
-  program.useLlamaParse = false;
+  options.useLlamaParse = false;
 }
-program.askModels = process.argv.includes("--ask-models");
-program.askExamples = process.argv.includes("--ask-examples");
 if (process.argv.includes("--no-files")) {
-  program.dataSources = [];
+  options.dataSources = [];
 } else if (process.argv.includes("--example-file")) {
-  program.dataSources = getDataSources(program.files, program.exampleFile);
+  options.dataSources = getDataSources(options.files, options.exampleFile);
 } else if (process.argv.includes("--llamacloud")) {
-  program.dataSources = [
-    {
-      type: "llamacloud",
-      config: {},
-    },
-    EXAMPLE_FILE,
-  ];
+  options.dataSources = [EXAMPLE_FILE];
+  options.vectorDb = "llamacloud";
 } else if (process.argv.includes("--web-source")) {
-  program.dataSources = [
+  options.dataSources = [
     {
       type: "web",
       config: {
-        baseUrl: program.webSource,
-        prefix: program.webSource,
+        baseUrl: options.webSource,
+        prefix: options.webSource,
         depth: 1,
       },
     },
   ];
 } else if (process.argv.includes("--db-source")) {
-  program.dataSources = [
+  options.dataSources = [
     {
       type: "db",
       config: {
-        uri: program.dbSource,
-        queries: program.dbQuery || "SELECT * FROM mytable",
+        uri: options.dbSource,
+        queries: options.dbQuery || "SELECT * FROM mytable",
       },
     },
   ];
 }
 
-const packageManager = !!program.useNpm
+const packageManager = !!options.useNpm
   ? "npm"
-  : !!program.usePnpm
+  : !!options.usePnpm
     ? "pnpm"
-    : !!program.useYarn
+    : !!options.useYarn
       ? "yarn"
       : getPkgManager();
 
 async function run(): Promise<void> {
-  const conf = new Conf({ projectName: "create-llama" });
-
-  if (program.resetPreferences) {
-    conf.clear();
-    console.log(`Preferences reset successfully`);
-    return;
-  }
-
   if (typeof projectPath === "string") {
     projectPath = projectPath.trim();
   }
@@ -331,35 +320,16 @@ async function run(): Promise<void> {
     process.exit(1);
   }
 
-  const preferences = (conf.get("preferences") || {}) as QuestionArgs;
-  await askQuestions(
-    program as unknown as QuestionArgs,
-    preferences,
-    program.openAiKey,
-  );
+  const answers = await askQuestions(options as unknown as QuestionArgs);
 
   await createApp({
-    template: program.template,
-    framework: program.framework,
-    ui: program.ui,
+    ...answers,
     appPath: resolvedProjectPath,
     packageManager,
-    frontend: program.frontend,
-    modelConfig: program.modelConfig,
-    llamaCloudKey: program.llamaCloudKey,
-    communityProjectConfig: program.communityProjectConfig,
-    llamapack: program.llamapack,
-    vectorDb: program.vectorDb,
-    externalPort: program.externalPort,
-    postInstallAction: program.postInstallAction,
-    dataSources: program.dataSources,
-    tools: program.tools,
-    useLlamaParse: program.useLlamaParse,
-    observability: program.observability,
+    externalPort: options.externalPort,
   });
-  conf.set("preferences", preferences);
 
-  if (program.postInstallAction === "VSCode") {
+  if (answers.postInstallAction === "VSCode") {
     console.log(`Starting VSCode in ${root}...`);
     try {
       execSync(`code . --new-window --goto README.md`, {
@@ -383,15 +353,15 @@ Please check ${cyan(
         )} for more information.`,
       );
     }
-  } else if (program.postInstallAction === "runApp") {
+  } else if (answers.postInstallAction === "runApp") {
     console.log(`Running app in ${root}...`);
     await runApp(
       root,
-      program.template,
-      program.frontend,
-      program.framework,
-      program.port,
-      program.externalPort,
+      answers.template,
+      answers.frontend,
+      answers.framework,
+      options.port,
+      options.externalPort,
     );
   }
 }
diff --git a/package.json b/package.json
index c115541c0400fbdc20eec94efb5aebe4d78bb391..42080e30c1a8a5b2273d26b7d0722993bac9bf20 100644
--- a/package.json
+++ b/package.json
@@ -49,8 +49,7 @@
     "async-retry": "1.3.1",
     "async-sema": "3.0.1",
     "ci-info": "github:watson/ci-info#f43f6a1cefff47fb361c88cf4b943fdbcaafe540",
-    "commander": "2.20.0",
-    "conf": "10.2.0",
+    "commander": "12.1.0",
     "cross-spawn": "7.0.3",
     "fast-glob": "3.3.1",
     "fs-extra": "11.2.0",
@@ -59,7 +58,7 @@
     "ollama": "^0.5.0",
     "ora": "^8.0.1",
     "picocolors": "1.0.0",
-    "prompts": "2.1.0",
+    "prompts": "2.4.2",
     "smol-toml": "^1.1.4",
     "tar": "6.1.15",
     "terminal-link": "^3.0.0",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index da907277d3a1efe48a713bc8dc7c3ac5ca153464..4111e30906d39ca5c382bc7cc5e8a73282ddd190 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -42,11 +42,8 @@ importers:
         specifier: github:watson/ci-info#f43f6a1cefff47fb361c88cf4b943fdbcaafe540
         version: https://codeload.github.com/watson/ci-info/tar.gz/f43f6a1cefff47fb361c88cf4b943fdbcaafe540
       commander:
-        specifier: 2.20.0
-        version: 2.20.0
-      conf:
-        specifier: 10.2.0
-        version: 10.2.0
+        specifier: 12.1.0
+        version: 12.1.0
       cross-spawn:
         specifier: 7.0.3
         version: 7.0.3
@@ -72,8 +69,8 @@ importers:
         specifier: 1.0.0
         version: 1.0.0
       prompts:
-        specifier: 2.1.0
-        version: 2.1.0
+        specifier: 2.4.2
+        version: 2.4.2
       smol-toml:
         specifier: ^1.1.4
         version: 1.1.4
@@ -336,20 +333,9 @@ packages:
     engines: {node: '>=0.4.0'}
     hasBin: true
 
-  ajv-formats@2.1.1:
-    resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==}
-    peerDependencies:
-      ajv: ^8.0.0
-    peerDependenciesMeta:
-      ajv:
-        optional: true
-
   ajv@6.12.6:
     resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==}
 
-  ajv@8.13.0:
-    resolution: {integrity: sha512-PRA911Blj99jR5RMeTunVbNXMF6Lp4vZXnk5GQjcnUWUTsrXtekg/pnmFFI2u/I36Y/2bITGS30GZCXei6uNkA==}
-
   ansi-colors@4.1.3:
     resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==}
     engines: {node: '>=6'}
@@ -410,10 +396,6 @@ packages:
   async-sema@3.0.1:
     resolution: {integrity: sha512-fKT2riE8EHAvJEfLJXZiATQWqZttjx1+tfgnVshCDrH8vlw4YC8aECe0B8MU184g+aVRFVgmfxFlKZKaozSrNw==}
 
-  atomically@1.7.0:
-    resolution: {integrity: sha512-Xcz9l0z7y9yQ9rdDaxlmaI4uJHf/T8g9hOEzJcsEqX2SjCj4J20uK7+ldkDHMbpJDK76wF7xEIgxc/vSlsfw5w==}
-    engines: {node: '>=10.12.0'}
-
   available-typed-arrays@1.0.7:
     resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==}
     engines: {node: '>= 0.4'}
@@ -530,8 +512,9 @@ packages:
   color-name@1.1.4:
     resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
 
-  commander@2.20.0:
-    resolution: {integrity: sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ==}
+  commander@12.1.0:
+    resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==}
+    engines: {node: '>=18'}
 
   commander@9.5.0:
     resolution: {integrity: sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==}
@@ -540,10 +523,6 @@ packages:
   concat-map@0.0.1:
     resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
 
-  conf@10.2.0:
-    resolution: {integrity: sha512-8fLl9F04EJqjSqH+QjITQfJF8BrOVaYr1jewVgSRAEWePfxT0sku4w2hrGQ60BC/TNLGQ2pgxNlTbWQmMPFvXg==}
-    engines: {node: '>=12'}
-
   cross-spawn@5.1.0:
     resolution: {integrity: sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==}
 
@@ -576,10 +555,6 @@ packages:
     resolution: {integrity: sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==}
     engines: {node: '>= 0.4'}
 
-  debounce-fn@4.0.0:
-    resolution: {integrity: sha512-8pYCQiL9Xdcg0UPSD3d+0KMlOjp+KGU5EPwYddgzQ7DATsg4fuUDjQtsYLmWjnk2obnNHgV3vE2Y4jejSOJVBQ==}
-    engines: {node: '>=10'}
-
   debug@4.3.4:
     resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==}
     engines: {node: '>=6.0'}
@@ -638,10 +613,6 @@ packages:
     resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==}
     engines: {node: '>=6.0.0'}
 
-  dot-prop@6.0.1:
-    resolution: {integrity: sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==}
-    engines: {node: '>=10'}
-
   duplexer3@0.1.5:
     resolution: {integrity: sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==}
 
@@ -664,10 +635,6 @@ packages:
     resolution: {integrity: sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ==}
     engines: {node: '>=8.6'}
 
-  env-paths@2.2.1:
-    resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==}
-    engines: {node: '>=6'}
-
   error-ex@1.3.2:
     resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==}
 
@@ -788,10 +755,6 @@ packages:
     resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==}
     engines: {node: '>=8'}
 
-  find-up@3.0.0:
-    resolution: {integrity: sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==}
-    engines: {node: '>=6'}
-
   find-up@4.1.0:
     resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==}
     engines: {node: '>=8'}
@@ -1057,10 +1020,6 @@ packages:
     resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
     engines: {node: '>=0.12.0'}
 
-  is-obj@2.0.0:
-    resolution: {integrity: sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==}
-    engines: {node: '>=8'}
-
   is-path-inside@3.0.3:
     resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==}
     engines: {node: '>=8'}
@@ -1138,12 +1097,6 @@ packages:
   json-schema-traverse@0.4.1:
     resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==}
 
-  json-schema-traverse@1.0.0:
-    resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==}
-
-  json-schema-typed@7.0.3:
-    resolution: {integrity: sha512-7DE8mpG+/fVw+dTpjbxnx47TaMnDfOI1jwft9g1VybltZCduyRQPJPvc+zzKY9WPHxhPWczyFuYa6I8Mw4iU5A==}
-
   json-stable-stringify-without-jsonify@1.0.1:
     resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==}
 
@@ -1182,10 +1135,6 @@ packages:
     resolution: {integrity: sha512-OfCBkGEw4nN6JLtgRidPX6QxjBQGQf72q3si2uvqyFEMbycSFFHwAZeXx6cJgFM9wmLrf9zBwCP3Ivqa+LLZPw==}
     engines: {node: '>=6'}
 
-  locate-path@3.0.0:
-    resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==}
-    engines: {node: '>=6'}
-
   locate-path@5.0.0:
     resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==}
     engines: {node: '>=8'}
@@ -1243,10 +1192,6 @@ packages:
     resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
     engines: {node: '>=6'}
 
-  mimic-fn@3.1.0:
-    resolution: {integrity: sha512-Ysbi9uYW9hFyfrThdDEQuykN4Ey6BuwPD2kpI5ES/nFTDn/98yxYNLZJcgUAKPT/mcrLLKaGzJR9YVxJrIdASQ==}
-    engines: {node: '>=8'}
-
   mimic-response@1.0.1:
     resolution: {integrity: sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==}
     engines: {node: '>=4'}
@@ -1375,10 +1320,6 @@ packages:
     resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==}
     engines: {node: '>=10'}
 
-  p-locate@3.0.0:
-    resolution: {integrity: sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==}
-    engines: {node: '>=6'}
-
   p-locate@4.1.0:
     resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==}
     engines: {node: '>=8'}
@@ -1407,10 +1348,6 @@ packages:
     resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==}
     engines: {node: '>=8'}
 
-  path-exists@3.0.0:
-    resolution: {integrity: sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==}
-    engines: {node: '>=4'}
-
   path-exists@4.0.0:
     resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
     engines: {node: '>=8'}
@@ -1449,10 +1386,6 @@ packages:
     resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==}
     engines: {node: '>=8'}
 
-  pkg-up@3.1.0:
-    resolution: {integrity: sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==}
-    engines: {node: '>=8'}
-
   playwright-core@1.44.0:
     resolution: {integrity: sha512-ZTbkNpFfYcGWohvTTl+xewITm7EOuqIqex0c7dNZ+aXsbrLj0qI8XlGKfPpipjm0Wny/4Lt4CJsWJk1stVS5qQ==}
     engines: {node: '>=16'}
@@ -1498,8 +1431,8 @@ packages:
     engines: {node: '>=14'}
     hasBin: true
 
-  prompts@2.1.0:
-    resolution: {integrity: sha512-+x5TozgqYdOwWsQFZizE/Tra3fKvAoy037kOyU6cgz84n8f6zxngLOV4O32kTwt9FcLCxAqw0P/c8rOr9y+Gfg==}
+  prompts@2.4.2:
+    resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==}
     engines: {node: '>= 6'}
 
   pseudomap@1.0.2:
@@ -1557,10 +1490,6 @@ packages:
     resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==}
     engines: {node: '>=0.10.0'}
 
-  require-from-string@2.0.2:
-    resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==}
-    engines: {node: '>=0.10.0'}
-
   require-main-filename@2.0.0:
     resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==}
 
@@ -2306,10 +2235,6 @@ snapshots:
 
   acorn@8.11.3: {}
 
-  ajv-formats@2.1.1(ajv@8.13.0):
-    optionalDependencies:
-      ajv: 8.13.0
-
   ajv@6.12.6:
     dependencies:
       fast-deep-equal: 3.1.3
@@ -2317,13 +2242,6 @@ snapshots:
       json-schema-traverse: 0.4.1
       uri-js: 4.4.1
 
-  ajv@8.13.0:
-    dependencies:
-      fast-deep-equal: 3.1.3
-      json-schema-traverse: 1.0.0
-      require-from-string: 2.0.2
-      uri-js: 4.4.1
-
   ansi-colors@4.1.3: {}
 
   ansi-escapes@5.0.0:
@@ -2383,8 +2301,6 @@ snapshots:
 
   async-sema@3.0.1: {}
 
-  atomically@1.7.0: {}
-
   available-typed-arrays@1.0.7:
     dependencies:
       possible-typed-array-names: 1.0.0
@@ -2506,25 +2422,12 @@ snapshots:
 
   color-name@1.1.4: {}
 
-  commander@2.20.0: {}
+  commander@12.1.0: {}
 
   commander@9.5.0: {}
 
   concat-map@0.0.1: {}
 
-  conf@10.2.0:
-    dependencies:
-      ajv: 8.13.0
-      ajv-formats: 2.1.1(ajv@8.13.0)
-      atomically: 1.7.0
-      debounce-fn: 4.0.0
-      dot-prop: 6.0.1
-      env-paths: 2.2.1
-      json-schema-typed: 7.0.3
-      onetime: 5.1.2
-      pkg-up: 3.1.0
-      semver: 7.6.1
-
   cross-spawn@5.1.0:
     dependencies:
       lru-cache: 4.1.5
@@ -2568,10 +2471,6 @@ snapshots:
       es-errors: 1.3.0
       is-data-view: 1.0.1
 
-  debounce-fn@4.0.0:
-    dependencies:
-      mimic-fn: 3.1.0
-
   debug@4.3.4:
     dependencies:
       ms: 2.1.2
@@ -2621,10 +2520,6 @@ snapshots:
     dependencies:
       esutils: 2.0.3
 
-  dot-prop@6.0.1:
-    dependencies:
-      is-obj: 2.0.0
-
   duplexer3@0.1.5: {}
 
   eastasianwidth@0.2.0: {}
@@ -2644,8 +2539,6 @@ snapshots:
       ansi-colors: 4.1.3
       strip-ansi: 6.0.1
 
-  env-paths@2.2.1: {}
-
   error-ex@1.3.2:
     dependencies:
       is-arrayish: 0.2.1
@@ -2841,10 +2734,6 @@ snapshots:
     dependencies:
       to-regex-range: 5.0.1
 
-  find-up@3.0.0:
-    dependencies:
-      locate-path: 3.0.0
-
   find-up@4.1.0:
     dependencies:
       locate-path: 5.0.0
@@ -3129,8 +3018,6 @@ snapshots:
 
   is-number@7.0.0: {}
 
-  is-obj@2.0.0: {}
-
   is-path-inside@3.0.3: {}
 
   is-plain-obj@1.1.0: {}
@@ -3197,10 +3084,6 @@ snapshots:
 
   json-schema-traverse@0.4.1: {}
 
-  json-schema-traverse@1.0.0: {}
-
-  json-schema-typed@7.0.3: {}
-
   json-stable-stringify-without-jsonify@1.0.1: {}
 
   json-stringify-safe@5.0.1: {}
@@ -3239,11 +3122,6 @@ snapshots:
       pify: 4.0.1
       strip-bom: 3.0.0
 
-  locate-path@3.0.0:
-    dependencies:
-      p-locate: 3.0.0
-      path-exists: 3.0.0
-
   locate-path@5.0.0:
     dependencies:
       p-locate: 4.1.0
@@ -3301,8 +3179,6 @@ snapshots:
 
   mimic-fn@2.1.0: {}
 
-  mimic-fn@3.1.0: {}
-
   mimic-response@1.0.1: {}
 
   mimic-response@2.1.0: {}
@@ -3425,10 +3301,6 @@ snapshots:
     dependencies:
       yocto-queue: 0.1.0
 
-  p-locate@3.0.0:
-    dependencies:
-      p-limit: 2.3.0
-
   p-locate@4.1.0:
     dependencies:
       p-limit: 2.3.0
@@ -3456,8 +3328,6 @@ snapshots:
       json-parse-even-better-errors: 2.3.1
       lines-and-columns: 1.2.4
 
-  path-exists@3.0.0: {}
-
   path-exists@4.0.0: {}
 
   path-is-absolute@1.0.1: {}
@@ -3483,10 +3353,6 @@ snapshots:
     dependencies:
       find-up: 4.1.0
 
-  pkg-up@3.1.0:
-    dependencies:
-      find-up: 3.0.0
-
   playwright-core@1.44.0: {}
 
   playwright@1.44.0:
@@ -3515,7 +3381,7 @@ snapshots:
 
   prettier@3.2.5: {}
 
-  prompts@2.1.0:
+  prompts@2.4.2:
     dependencies:
       kleur: 3.0.3
       sisteransi: 1.0.5
@@ -3585,8 +3451,6 @@ snapshots:
 
   require-directory@2.1.1: {}
 
-  require-from-string@2.0.2: {}
-
   require-main-filename@2.0.0: {}
 
   resolve-from@4.0.0: {}
diff --git a/questions.ts b/questions.ts
deleted file mode 100644
index 81061f83f7bf67d1b1ba6a675efd863cccdcc54e..0000000000000000000000000000000000000000
--- a/questions.ts
+++ /dev/null
@@ -1,769 +0,0 @@
-import { execSync } from "child_process";
-import ciInfo from "ci-info";
-import fs from "fs";
-import path from "path";
-import { blue, green, red } from "picocolors";
-import prompts from "prompts";
-import { InstallAppArgs } from "./create-app";
-import {
-  TemplateDataSource,
-  TemplateDataSourceType,
-  TemplateFramework,
-  TemplateType,
-} from "./helpers";
-import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./helpers/constant";
-import { EXAMPLE_FILE } from "./helpers/datasources";
-import { templatesDir } from "./helpers/dir";
-import { getAvailableLlamapackOptions } from "./helpers/llama-pack";
-import { askModelConfig } from "./helpers/providers";
-import { getProjectOptions } from "./helpers/repo";
-import {
-  supportedTools,
-  toolRequiresConfig,
-  toolsRequireConfig,
-} from "./helpers/tools";
-
-export type QuestionArgs = Omit<
-  InstallAppArgs,
-  "appPath" | "packageManager"
-> & {
-  askModels?: boolean;
-  askExamples?: boolean;
-};
-const supportedContextFileTypes = [
-  ".pdf",
-  ".doc",
-  ".docx",
-  ".xls",
-  ".xlsx",
-  ".csv",
-];
-const MACOS_FILE_SELECTION_SCRIPT = `
-osascript -l JavaScript -e '
-  a = Application.currentApplication();
-  a.includeStandardAdditions = true;
-  a.chooseFile({ withPrompt: "Please select files to process:", multipleSelectionsAllowed: true }).map(file => file.toString())
-'`;
-const MACOS_FOLDER_SELECTION_SCRIPT = `
-osascript -l JavaScript -e '
-  a = Application.currentApplication();
-  a.includeStandardAdditions = true;
-  a.chooseFolder({ withPrompt: "Please select folders to process:", multipleSelectionsAllowed: true }).map(folder => folder.toString())
-'`;
-const WINDOWS_FILE_SELECTION_SCRIPT = `
-Add-Type -AssemblyName System.Windows.Forms
-$openFileDialog = New-Object System.Windows.Forms.OpenFileDialog
-$openFileDialog.InitialDirectory = [Environment]::GetFolderPath('Desktop')
-$openFileDialog.Multiselect = $true
-$result = $openFileDialog.ShowDialog()
-if ($result -eq 'OK') {
-  $openFileDialog.FileNames
-}
-`;
-const WINDOWS_FOLDER_SELECTION_SCRIPT = `
-Add-Type -AssemblyName System.windows.forms
-$folderBrowser = New-Object System.Windows.Forms.FolderBrowserDialog
-$dialogResult = $folderBrowser.ShowDialog()
-if ($dialogResult -eq [System.Windows.Forms.DialogResult]::OK)
-{
-    $folderBrowser.SelectedPath
-}
-`;
-
-const defaults: Omit<QuestionArgs, "modelConfig"> = {
-  template: "streaming",
-  framework: "nextjs",
-  ui: "shadcn",
-  frontend: false,
-  llamaCloudKey: "",
-  useLlamaParse: false,
-  communityProjectConfig: undefined,
-  llamapack: "",
-  postInstallAction: "dependencies",
-  dataSources: [],
-  tools: [],
-};
-
-export const questionHandlers = {
-  onCancel: () => {
-    console.error("Exiting.");
-    process.exit(1);
-  },
-};
-
-const getVectorDbChoices = (framework: TemplateFramework) => {
-  const choices = [
-    {
-      title: "No, just store the data in the file system",
-      value: "none",
-    },
-    { title: "MongoDB", value: "mongo" },
-    { title: "PostgreSQL", value: "pg" },
-    { title: "Pinecone", value: "pinecone" },
-    { title: "Milvus", value: "milvus" },
-    { title: "Astra", value: "astra" },
-    { title: "Qdrant", value: "qdrant" },
-    { title: "ChromaDB", value: "chroma" },
-    { title: "Weaviate", value: "weaviate" },
-  ];
-
-  const vectordbLang = framework === "fastapi" ? "python" : "typescript";
-  const compPath = path.join(templatesDir, "components");
-  const vectordbPath = path.join(compPath, "vectordbs", vectordbLang);
-
-  const availableChoices = fs
-    .readdirSync(vectordbPath)
-    .filter((file) => fs.statSync(path.join(vectordbPath, file)).isDirectory());
-
-  const displayedChoices = choices.filter((choice) =>
-    availableChoices.includes(choice.value),
-  );
-
-  return displayedChoices;
-};
-
-export const getDataSourceChoices = (
-  framework: TemplateFramework,
-  selectedDataSource: TemplateDataSource[],
-  template?: TemplateType,
-) => {
-  // If LlamaCloud is already selected, don't show any other options
-  if (selectedDataSource.find((s) => s.type === "llamacloud")) {
-    return [];
-  }
-
-  const choices = [];
-
-  if (selectedDataSource.length > 0) {
-    choices.push({
-      title: "No",
-      value: "no",
-    });
-  }
-  if (selectedDataSource === undefined || selectedDataSource.length === 0) {
-    choices.push({
-      title: "No datasource",
-      value: "none",
-    });
-    choices.push({
-      title:
-        process.platform !== "linux"
-          ? "Use an example PDF"
-          : "Use an example PDF (you can add your own data files later)",
-      value: "exampleFile",
-    });
-  }
-
-  // Linux has many distros so we won't support file/folder picker for now
-  if (process.platform !== "linux") {
-    choices.push(
-      {
-        title: `Use local files (${supportedContextFileTypes.join(", ")})`,
-        value: "file",
-      },
-      {
-        title:
-          process.platform === "win32"
-            ? "Use a local folder"
-            : "Use local folders",
-        value: "folder",
-      },
-    );
-  }
-
-  if (framework === "fastapi" && template !== "extractor") {
-    choices.push({
-      title: "Use website content (requires Chrome)",
-      value: "web",
-    });
-    choices.push({
-      title: "Use data from a database (Mysql, PostgreSQL)",
-      value: "db",
-    });
-  }
-
-  if (!selectedDataSource.length && template !== "extractor") {
-    choices.push({
-      title: "Use managed index from LlamaCloud",
-      value: "llamacloud",
-    });
-  }
-  return choices;
-};
-
-const selectLocalContextData = async (type: TemplateDataSourceType) => {
-  try {
-    let selectedPath: string = "";
-    let execScript: string;
-    let execOpts: any = {};
-    switch (process.platform) {
-      case "win32": // Windows
-        execScript =
-          type === "file"
-            ? WINDOWS_FILE_SELECTION_SCRIPT
-            : WINDOWS_FOLDER_SELECTION_SCRIPT;
-        execOpts = { shell: "powershell.exe" };
-        break;
-      case "darwin": // MacOS
-        execScript =
-          type === "file"
-            ? MACOS_FILE_SELECTION_SCRIPT
-            : MACOS_FOLDER_SELECTION_SCRIPT;
-        break;
-      default: // Unsupported OS
-        console.log(red("Unsupported OS error!"));
-        process.exit(1);
-    }
-    selectedPath = execSync(execScript, execOpts).toString().trim();
-    const paths =
-      process.platform === "win32"
-        ? selectedPath.split("\r\n")
-        : selectedPath.split(", ");
-
-    for (const p of paths) {
-      if (
-        fs.statSync(p).isFile() &&
-        !supportedContextFileTypes.includes(path.extname(p))
-      ) {
-        console.log(
-          red(
-            `Please select a supported file type: ${supportedContextFileTypes}`,
-          ),
-        );
-        process.exit(1);
-      }
-    }
-    return paths;
-  } catch (error) {
-    console.log(
-      red(
-        "Got an error when trying to select local context data! Please try again or select another data source option.",
-      ),
-    );
-    process.exit(1);
-  }
-};
-
-export const onPromptState = (state: any) => {
-  if (state.aborted) {
-    // If we don't re-enable the terminal cursor before exiting
-    // the program, the cursor will remain hidden
-    process.stdout.write("\x1B[?25h");
-    process.stdout.write("\n");
-    process.exit(1);
-  }
-};
-
-export const askQuestions = async (
-  program: QuestionArgs,
-  preferences: QuestionArgs,
-  openAiKey?: string,
-) => {
-  const getPrefOrDefault = <K extends keyof Omit<QuestionArgs, "modelConfig">>(
-    field: K,
-  ): Omit<QuestionArgs, "modelConfig">[K] =>
-    preferences[field] ?? defaults[field];
-
-  // Ask for next action after installation
-  async function askPostInstallAction() {
-    if (program.postInstallAction === undefined) {
-      if (ciInfo.isCI) {
-        program.postInstallAction = getPrefOrDefault("postInstallAction");
-      } else {
-        const actionChoices = [
-          {
-            title: "Just generate code (~1 sec)",
-            value: "none",
-          },
-          {
-            title: "Start in VSCode (~1 sec)",
-            value: "VSCode",
-          },
-          {
-            title: "Generate code and install dependencies (~2 min)",
-            value: "dependencies",
-          },
-        ];
-
-        const modelConfigured =
-          !program.llamapack && program.modelConfig.isConfigured();
-        // If using LlamaParse, require LlamaCloud API key
-        const llamaCloudKeyConfigured = program.useLlamaParse
-          ? program.llamaCloudKey || process.env["LLAMA_CLOUD_API_KEY"]
-          : true;
-        const hasVectorDb = program.vectorDb && program.vectorDb !== "none";
-        // Can run the app if all tools do not require configuration
-        if (
-          !hasVectorDb &&
-          modelConfigured &&
-          llamaCloudKeyConfigured &&
-          !toolsRequireConfig(program.tools)
-        ) {
-          actionChoices.push({
-            title:
-              "Generate code, install dependencies, and run the app (~2 min)",
-            value: "runApp",
-          });
-        }
-
-        const { action } = await prompts(
-          {
-            type: "select",
-            name: "action",
-            message: "How would you like to proceed?",
-            choices: actionChoices,
-            initial: 1,
-          },
-          questionHandlers,
-        );
-
-        program.postInstallAction = action;
-      }
-    }
-  }
-
-  if (!program.template) {
-    if (ciInfo.isCI) {
-      program.template = getPrefOrDefault("template");
-    } else {
-      const styledRepo = blue(
-        `https://github.com/${COMMUNITY_OWNER}/${COMMUNITY_REPO}`,
-      );
-      const { template } = await prompts(
-        {
-          type: "select",
-          name: "template",
-          message: "Which template would you like to use?",
-          choices: [
-            { title: "Agentic RAG (e.g. chat with docs)", value: "streaming" },
-            {
-              title: "Multi-agent app (using workflows)",
-              value: "multiagent",
-            },
-            { title: "Structured Extractor", value: "extractor" },
-            ...(program.askExamples
-              ? [
-                  {
-                    title: `Community template from ${styledRepo}`,
-                    value: "community",
-                  },
-                  {
-                    title: "Example using a LlamaPack",
-                    value: "llamapack",
-                  },
-                ]
-              : []),
-          ],
-          initial: 0,
-        },
-        questionHandlers,
-      );
-      program.template = template;
-      preferences.template = template;
-    }
-  }
-
-  if (program.template === "community") {
-    const projectOptions = await getProjectOptions(
-      COMMUNITY_OWNER,
-      COMMUNITY_REPO,
-    );
-    const { communityProjectConfig } = await prompts(
-      {
-        type: "select",
-        name: "communityProjectConfig",
-        message: "Select community template",
-        choices: projectOptions.map(({ title, value }) => ({
-          title,
-          value: JSON.stringify(value), // serialize value to string in terminal
-        })),
-        initial: 0,
-      },
-      questionHandlers,
-    );
-    const projectConfig = JSON.parse(communityProjectConfig);
-    program.communityProjectConfig = projectConfig;
-    preferences.communityProjectConfig = projectConfig;
-    return; // early return - no further questions needed for community projects
-  }
-
-  if (program.template === "llamapack") {
-    const availableLlamaPacks = await getAvailableLlamapackOptions();
-    const { llamapack } = await prompts(
-      {
-        type: "select",
-        name: "llamapack",
-        message: "Select LlamaPack",
-        choices: availableLlamaPacks.map((pack) => ({
-          title: pack.name,
-          value: pack.folderPath,
-        })),
-        initial: 0,
-      },
-      questionHandlers,
-    );
-    program.llamapack = llamapack;
-    preferences.llamapack = llamapack;
-    await askPostInstallAction();
-    return; // early return - no further questions needed for llamapack projects
-  }
-
-  if (program.template === "extractor") {
-    // Extractor template only supports FastAPI, empty data sources, and llamacloud
-    // So we just use example file for extractor template, this allows user to choose vector database later
-    program.dataSources = [EXAMPLE_FILE];
-    program.framework = preferences.framework = "fastapi";
-  }
-  if (!program.framework) {
-    if (ciInfo.isCI) {
-      program.framework = getPrefOrDefault("framework");
-    } else {
-      const choices = [
-        { title: "NextJS", value: "nextjs" },
-        { title: "Express", value: "express" },
-        { title: "FastAPI (Python)", value: "fastapi" },
-      ];
-
-      const { framework } = await prompts(
-        {
-          type: "select",
-          name: "framework",
-          message: "Which framework would you like to use?",
-          choices,
-          initial: 0,
-        },
-        questionHandlers,
-      );
-      program.framework = framework;
-      preferences.framework = framework;
-    }
-  }
-
-  if (
-    (program.framework === "express" || program.framework === "fastapi") &&
-    (program.template === "streaming" || program.template === "multiagent")
-  ) {
-    // if a backend-only framework is selected, ask whether we should create a frontend
-    if (program.frontend === undefined) {
-      if (ciInfo.isCI) {
-        program.frontend = getPrefOrDefault("frontend");
-      } else {
-        const styledNextJS = blue("NextJS");
-        const styledBackend = green(
-          program.framework === "express"
-            ? "Express "
-            : program.framework === "fastapi"
-              ? "FastAPI (Python) "
-              : "",
-        );
-        const { frontend } = await prompts({
-          onState: onPromptState,
-          type: "toggle",
-          name: "frontend",
-          message: `Would you like to generate a ${styledNextJS} frontend for your ${styledBackend}backend?`,
-          initial: getPrefOrDefault("frontend"),
-          active: "Yes",
-          inactive: "No",
-        });
-        program.frontend = Boolean(frontend);
-        preferences.frontend = Boolean(frontend);
-      }
-    }
-  } else {
-    program.frontend = false;
-  }
-
-  if (program.framework === "nextjs" || program.frontend) {
-    if (!program.ui) {
-      program.ui = defaults.ui;
-    }
-  }
-
-  if (!program.observability && program.template === "streaming") {
-    if (ciInfo.isCI) {
-      program.observability = getPrefOrDefault("observability");
-    } else {
-      const { observability } = await prompts(
-        {
-          type: "select",
-          name: "observability",
-          message: "Would you like to set up observability?",
-          choices: [
-            { title: "No", value: "none" },
-            ...(program.framework === "fastapi"
-              ? [{ title: "LlamaTrace", value: "llamatrace" }]
-              : []),
-            { title: "Traceloop", value: "traceloop" },
-          ],
-          initial: 0,
-        },
-        questionHandlers,
-      );
-
-      program.observability = observability;
-      preferences.observability = observability;
-    }
-  }
-
-  if (!program.modelConfig) {
-    const modelConfig = await askModelConfig({
-      openAiKey,
-      askModels: program.askModels ?? false,
-      framework: program.framework,
-    });
-    program.modelConfig = modelConfig;
-    preferences.modelConfig = modelConfig;
-  }
-
-  if (!program.dataSources) {
-    if (ciInfo.isCI) {
-      program.dataSources = getPrefOrDefault("dataSources");
-    } else {
-      program.dataSources = [];
-      // continue asking user for data sources if none are initially provided
-      while (true) {
-        const firstQuestion = program.dataSources.length === 0;
-        const choices = getDataSourceChoices(
-          program.framework,
-          program.dataSources,
-          program.template,
-        );
-        if (choices.length === 0) break;
-        const { selectedSource } = await prompts(
-          {
-            type: "select",
-            name: "selectedSource",
-            message: firstQuestion
-              ? "Which data source would you like to use?"
-              : "Would you like to add another data source?",
-            choices,
-            initial: firstQuestion ? 1 : 0,
-          },
-          questionHandlers,
-        );
-
-        if (selectedSource === "no" || selectedSource === "none") {
-          // user doesn't want another data source or any data source
-          break;
-        }
-        switch (selectedSource) {
-          case "exampleFile": {
-            program.dataSources.push(EXAMPLE_FILE);
-            break;
-          }
-          case "file":
-          case "folder": {
-            const selectedPaths = await selectLocalContextData(selectedSource);
-            for (const p of selectedPaths) {
-              program.dataSources.push({
-                type: "file",
-                config: {
-                  path: p,
-                },
-              });
-            }
-            break;
-          }
-          case "web": {
-            const { baseUrl } = await prompts(
-              {
-                type: "text",
-                name: "baseUrl",
-                message: "Please provide base URL of the website: ",
-                initial: "https://www.llamaindex.ai",
-                validate: (value: string) => {
-                  if (!value.includes("://")) {
-                    value = `https://${value}`;
-                  }
-                  const urlObj = new URL(value);
-                  if (
-                    urlObj.protocol !== "https:" &&
-                    urlObj.protocol !== "http:"
-                  ) {
-                    return `URL=${value} has invalid protocol, only allow http or https`;
-                  }
-                  return true;
-                },
-              },
-              questionHandlers,
-            );
-
-            program.dataSources.push({
-              type: "web",
-              config: {
-                baseUrl,
-                prefix: baseUrl,
-                depth: 1,
-              },
-            });
-            break;
-          }
-          case "db": {
-            const dbPrompts: prompts.PromptObject<string>[] = [
-              {
-                type: "text",
-                name: "uri",
-                message:
-                  "Please enter the connection string (URI) for the database.",
-                initial: "mysql+pymysql://user:pass@localhost:3306/mydb",
-                validate: (value: string) => {
-                  if (!value) {
-                    return "Please provide a valid connection string";
-                  } else if (
-                    !(
-                      value.startsWith("mysql+pymysql://") ||
-                      value.startsWith("postgresql+psycopg://")
-                    )
-                  ) {
-                    return "The connection string must start with 'mysql+pymysql://' for MySQL or 'postgresql+psycopg://' for PostgreSQL";
-                  }
-                  return true;
-                },
-              },
-              // Only ask for a query, user can provide more complex queries in the config file later
-              {
-                type: (prev) => (prev ? "text" : null),
-                name: "queries",
-                message: "Please enter the SQL query to fetch data:",
-                initial: "SELECT * FROM mytable",
-              },
-            ];
-            program.dataSources.push({
-              type: "db",
-              config: await prompts(dbPrompts, questionHandlers),
-            });
-            break;
-          }
-          case "llamacloud": {
-            program.dataSources.push({
-              type: "llamacloud",
-              config: {},
-            });
-            program.dataSources.push(EXAMPLE_FILE);
-            break;
-          }
-        }
-      }
-    }
-  }
-
-  const isUsingLlamaCloud = program.dataSources.some(
-    (ds) => ds.type === "llamacloud",
-  );
-
-  // Asking for LlamaParse if user selected file data source
-  if (isUsingLlamaCloud) {
-    // default to use LlamaParse if using LlamaCloud
-    program.useLlamaParse = preferences.useLlamaParse = true;
-  } else {
-    // Extractor template doesn't support LlamaParse and LlamaCloud right now (cannot use asyncio loop in Reflex)
-    if (
-      program.useLlamaParse === undefined &&
-      program.template !== "extractor"
-    ) {
-      // if already set useLlamaParse, don't ask again
-      if (program.dataSources.some((ds) => ds.type === "file")) {
-        if (ciInfo.isCI) {
-          program.useLlamaParse = getPrefOrDefault("useLlamaParse");
-        } else {
-          const { useLlamaParse } = await prompts(
-            {
-              type: "toggle",
-              name: "useLlamaParse",
-              message:
-                "Would you like to use LlamaParse (improved parser for RAG - requires API key)?",
-              initial: false,
-              active: "yes",
-              inactive: "no",
-            },
-            questionHandlers,
-          );
-          program.useLlamaParse = useLlamaParse;
-          preferences.useLlamaParse = useLlamaParse;
-        }
-      }
-    }
-  }
-
-  // Ask for LlamaCloud API key when using a LlamaCloud index or LlamaParse
-  if (isUsingLlamaCloud || program.useLlamaParse) {
-    if (!program.llamaCloudKey) {
-      // if already set, don't ask again
-      if (ciInfo.isCI) {
-        program.llamaCloudKey = getPrefOrDefault("llamaCloudKey");
-      } else {
-        // Ask for LlamaCloud API key
-        const { llamaCloudKey } = await prompts(
-          {
-            type: "text",
-            name: "llamaCloudKey",
-            message:
-              "Please provide your LlamaCloud API key (leave blank to skip):",
-          },
-          questionHandlers,
-        );
-        program.llamaCloudKey = preferences.llamaCloudKey =
-          llamaCloudKey || process.env.LLAMA_CLOUD_API_KEY;
-      }
-    }
-  }
-
-  if (isUsingLlamaCloud) {
-    // When using a LlamaCloud index, don't ask for vector database and use code in `llamacloud` folder for vector database
-    const vectorDb = "llamacloud";
-    program.vectorDb = vectorDb;
-    preferences.vectorDb = vectorDb;
-  } else if (program.dataSources.length > 0 && !program.vectorDb) {
-    if (ciInfo.isCI) {
-      program.vectorDb = getPrefOrDefault("vectorDb");
-    } else {
-      const { vectorDb } = await prompts(
-        {
-          type: "select",
-          name: "vectorDb",
-          message: "Would you like to use a vector database?",
-          choices: getVectorDbChoices(program.framework),
-          initial: 0,
-        },
-        questionHandlers,
-      );
-      program.vectorDb = vectorDb;
-      preferences.vectorDb = vectorDb;
-    }
-  }
-
-  if (
-    !program.tools &&
-    (program.template === "streaming" || program.template === "multiagent")
-  ) {
-    if (ciInfo.isCI) {
-      program.tools = getPrefOrDefault("tools");
-    } else {
-      const options = supportedTools.filter((t) =>
-        t.supportedFrameworks?.includes(program.framework),
-      );
-      const toolChoices = options.map((tool) => ({
-        title: `${tool.display}${toolRequiresConfig(tool) ? " (needs configuration)" : ""}`,
-        value: tool.name,
-      }));
-      const { toolsName } = await prompts({
-        type: "multiselect",
-        name: "toolsName",
-        message:
-          "Would you like to build an agent using tools? If so, select the tools here, otherwise just press enter",
-        choices: toolChoices,
-      });
-      const tools = toolsName?.map((tool: string) =>
-        supportedTools.find((t) => t.name === tool),
-      );
-      program.tools = tools;
-      preferences.tools = tools;
-    }
-  }
-
-  await askPostInstallAction();
-};
-
-export const toChoice = (value: string) => {
-  return { title: value, value };
-};
diff --git a/questions/ci.ts b/questions/ci.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c2438b5deacac0f6422950ce111798e07ed6975b
--- /dev/null
+++ b/questions/ci.ts
@@ -0,0 +1,30 @@
+import { askModelConfig } from "../helpers/providers";
+import { QuestionArgs, QuestionResults } from "./types";
+
+const defaults: Omit<QuestionArgs, "modelConfig"> = {
+  template: "streaming",
+  framework: "nextjs",
+  ui: "shadcn",
+  frontend: false,
+  llamaCloudKey: "",
+  useLlamaParse: false,
+  communityProjectConfig: undefined,
+  llamapack: "",
+  postInstallAction: "dependencies",
+  dataSources: [],
+  tools: [],
+};
+
+export async function getCIQuestionResults(
+  program: QuestionArgs,
+): Promise<QuestionResults> {
+  return {
+    ...defaults,
+    ...program,
+    modelConfig: await askModelConfig({
+      openAiKey: program.openAiKey,
+      askModels: false,
+      framework: program.framework,
+    }),
+  };
+}
diff --git a/questions/datasources.ts b/questions/datasources.ts
new file mode 100644
index 0000000000000000000000000000000000000000..db282af98e4a0d31351ed2d5e6a5748cfd61822f
--- /dev/null
+++ b/questions/datasources.ts
@@ -0,0 +1,64 @@
+import {
+  TemplateDataSource,
+  TemplateFramework,
+  TemplateType,
+} from "../helpers";
+import { supportedContextFileTypes } from "./utils";
+
+export const getDataSourceChoices = (
+  framework: TemplateFramework,
+  selectedDataSource: TemplateDataSource[],
+  template?: TemplateType,
+) => {
+  const choices = [];
+
+  if (selectedDataSource.length > 0) {
+    choices.push({
+      title: "No",
+      value: "no",
+    });
+  }
+  if (selectedDataSource === undefined || selectedDataSource.length === 0) {
+    choices.push({
+      title: "No datasource",
+      value: "none",
+    });
+    choices.push({
+      title:
+        process.platform !== "linux"
+          ? "Use an example PDF"
+          : "Use an example PDF (you can add your own data files later)",
+      value: "exampleFile",
+    });
+  }
+
+  // Linux has many distros so we won't support file/folder picker for now
+  if (process.platform !== "linux") {
+    choices.push(
+      {
+        title: `Use local files (${supportedContextFileTypes.join(", ")})`,
+        value: "file",
+      },
+      {
+        title:
+          process.platform === "win32"
+            ? "Use a local folder"
+            : "Use local folders",
+        value: "folder",
+      },
+    );
+  }
+
+  if (framework === "fastapi" && template !== "extractor") {
+    choices.push({
+      title: "Use website content (requires Chrome)",
+      value: "web",
+    });
+    choices.push({
+      title: "Use data from a database (Mysql, PostgreSQL)",
+      value: "db",
+    });
+  }
+
+  return choices;
+};
diff --git a/questions/index.ts b/questions/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..03b984e365fd7100cc1844b24ce69e227bf20803
--- /dev/null
+++ b/questions/index.ts
@@ -0,0 +1,18 @@
+import ciInfo from "ci-info";
+import { getCIQuestionResults } from "./ci";
+import { askProQuestions } from "./questions";
+import { askSimpleQuestions } from "./simple";
+import { QuestionArgs, QuestionResults } from "./types";
+
+export const askQuestions = async (
+  args: QuestionArgs,
+): Promise<QuestionResults> => {
+  if (ciInfo.isCI) {
+    return await getCIQuestionResults(args);
+  } else if (args.pro) {
+    // TODO: refactor pro questions to return a result object
+    await askProQuestions(args);
+    return args as unknown as QuestionResults;
+  }
+  return await askSimpleQuestions(args);
+};
diff --git a/questions/questions.ts b/questions/questions.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e48d3c08050d0b7ab7322c58cb04ddadf51c869b
--- /dev/null
+++ b/questions/questions.ts
@@ -0,0 +1,400 @@
+import { blue, green } from "picocolors";
+import prompts from "prompts";
+import { COMMUNITY_OWNER, COMMUNITY_REPO } from "../helpers/constant";
+import { EXAMPLE_FILE } from "../helpers/datasources";
+import { getAvailableLlamapackOptions } from "../helpers/llama-pack";
+import { askModelConfig } from "../helpers/providers";
+import { getProjectOptions } from "../helpers/repo";
+import { supportedTools, toolRequiresConfig } from "../helpers/tools";
+import { getDataSourceChoices } from "./datasources";
+import { getVectorDbChoices } from "./stores";
+import { QuestionArgs } from "./types";
+import {
+  askPostInstallAction,
+  onPromptState,
+  questionHandlers,
+  selectLocalContextData,
+} from "./utils";
+
+export const askProQuestions = async (program: QuestionArgs) => {
+  if (!program.template) {
+    const styledRepo = blue(
+      `https://github.com/${COMMUNITY_OWNER}/${COMMUNITY_REPO}`,
+    );
+    const { template } = await prompts(
+      {
+        type: "select",
+        name: "template",
+        message: "Which template would you like to use?",
+        choices: [
+          { title: "Agentic RAG (e.g. chat with docs)", value: "streaming" },
+          {
+            title: "Multi-agent app (using workflows)",
+            value: "multiagent",
+          },
+          { title: "Structured Extractor", value: "extractor" },
+          {
+            title: `Community template from ${styledRepo}`,
+            value: "community",
+          },
+          {
+            title: "Example using a LlamaPack",
+            value: "llamapack",
+          },
+        ],
+        initial: 0,
+      },
+      questionHandlers,
+    );
+    program.template = template;
+  }
+
+  if (program.template === "community") {
+    const projectOptions = await getProjectOptions(
+      COMMUNITY_OWNER,
+      COMMUNITY_REPO,
+    );
+    const { communityProjectConfig } = await prompts(
+      {
+        type: "select",
+        name: "communityProjectConfig",
+        message: "Select community template",
+        choices: projectOptions.map(({ title, value }) => ({
+          title,
+          value: JSON.stringify(value), // serialize value to string in terminal
+        })),
+        initial: 0,
+      },
+      questionHandlers,
+    );
+    const projectConfig = JSON.parse(communityProjectConfig);
+    program.communityProjectConfig = projectConfig;
+    return; // early return - no further questions needed for community projects
+  }
+
+  if (program.template === "llamapack") {
+    const availableLlamaPacks = await getAvailableLlamapackOptions();
+    const { llamapack } = await prompts(
+      {
+        type: "select",
+        name: "llamapack",
+        message: "Select LlamaPack",
+        choices: availableLlamaPacks.map((pack) => ({
+          title: pack.name,
+          value: pack.folderPath,
+        })),
+        initial: 0,
+      },
+      questionHandlers,
+    );
+    program.llamapack = llamapack;
+    program.postInstallAction = await askPostInstallAction(program);
+    return; // early return - no further questions needed for llamapack projects
+  }
+
+  if (program.template === "extractor") {
+    // Extractor template only supports FastAPI, empty data sources, and llamacloud
+    // So we just use example file for extractor template, this allows user to choose vector database later
+    program.dataSources = [EXAMPLE_FILE];
+    program.framework = "fastapi";
+  }
+
+  if (!program.framework) {
+    const choices = [
+      { title: "NextJS", value: "nextjs" },
+      { title: "Express", value: "express" },
+      { title: "FastAPI (Python)", value: "fastapi" },
+    ];
+
+    const { framework } = await prompts(
+      {
+        type: "select",
+        name: "framework",
+        message: "Which framework would you like to use?",
+        choices,
+        initial: 0,
+      },
+      questionHandlers,
+    );
+    program.framework = framework;
+  }
+
+  if (
+    (program.framework === "express" || program.framework === "fastapi") &&
+    (program.template === "streaming" || program.template === "multiagent")
+  ) {
+    // if a backend-only framework is selected, ask whether we should create a frontend
+    if (program.frontend === undefined) {
+      const styledNextJS = blue("NextJS");
+      const styledBackend = green(
+        program.framework === "express"
+          ? "Express "
+          : program.framework === "fastapi"
+            ? "FastAPI (Python) "
+            : "",
+      );
+      const { frontend } = await prompts({
+        onState: onPromptState,
+        type: "toggle",
+        name: "frontend",
+        message: `Would you like to generate a ${styledNextJS} frontend for your ${styledBackend}backend?`,
+        initial: false,
+        active: "Yes",
+        inactive: "No",
+      });
+      program.frontend = Boolean(frontend);
+    }
+  } else {
+    program.frontend = false;
+  }
+
+  if (program.framework === "nextjs" || program.frontend) {
+    if (!program.ui) {
+      program.ui = "shadcn";
+    }
+  }
+
+  if (!program.observability && program.template === "streaming") {
+    const { observability } = await prompts(
+      {
+        type: "select",
+        name: "observability",
+        message: "Would you like to set up observability?",
+        choices: [
+          { title: "No", value: "none" },
+          ...(program.framework === "fastapi"
+            ? [{ title: "LlamaTrace", value: "llamatrace" }]
+            : []),
+          { title: "Traceloop", value: "traceloop" },
+        ],
+        initial: 0,
+      },
+      questionHandlers,
+    );
+
+    program.observability = observability;
+  }
+
+  if (!program.modelConfig) {
+    const modelConfig = await askModelConfig({
+      openAiKey: program.openAiKey,
+      askModels: program.askModels ?? false,
+      framework: program.framework,
+    });
+    program.modelConfig = modelConfig;
+  }
+
+  if (!program.vectorDb) {
+    const { vectorDb } = await prompts(
+      {
+        type: "select",
+        name: "vectorDb",
+        message: "Would you like to use a vector database?",
+        choices: getVectorDbChoices(program.framework),
+        initial: 0,
+      },
+      questionHandlers,
+    );
+    program.vectorDb = vectorDb;
+  }
+
+  if (program.vectorDb === "llamacloud") {
+    // When using a LlamaCloud index, don't ask for data sources; just copy an example file
+    program.dataSources = [EXAMPLE_FILE];
+  }
+
+  if (!program.dataSources) {
+    program.dataSources = [];
+    // continue asking user for data sources if none are initially provided
+    while (true) {
+      const firstQuestion = program.dataSources.length === 0;
+      const choices = getDataSourceChoices(
+        program.framework,
+        program.dataSources,
+        program.template,
+      );
+      if (choices.length === 0) break;
+      const { selectedSource } = await prompts(
+        {
+          type: "select",
+          name: "selectedSource",
+          message: firstQuestion
+            ? "Which data source would you like to use?"
+            : "Would you like to add another data source?",
+          choices,
+          initial: firstQuestion ? 1 : 0,
+        },
+        questionHandlers,
+      );
+
+      if (selectedSource === "no" || selectedSource === "none") {
+        // user doesn't want another data source or any data source
+        break;
+      }
+      switch (selectedSource) {
+        case "exampleFile": {
+          program.dataSources.push(EXAMPLE_FILE);
+          break;
+        }
+        case "file":
+        case "folder": {
+          const selectedPaths = await selectLocalContextData(selectedSource);
+          for (const p of selectedPaths) {
+            program.dataSources.push({
+              type: "file",
+              config: {
+                path: p,
+              },
+            });
+          }
+          break;
+        }
+        case "web": {
+          const { baseUrl } = await prompts(
+            {
+              type: "text",
+              name: "baseUrl",
+              message: "Please provide base URL of the website: ",
+              initial: "https://www.llamaindex.ai",
+              validate: (value: string) => {
+                if (!value.includes("://")) {
+                  value = `https://${value}`;
+                }
+                const urlObj = new URL(value);
+                if (
+                  urlObj.protocol !== "https:" &&
+                  urlObj.protocol !== "http:"
+                ) {
+                  return `URL=${value} has invalid protocol, only allow http or https`;
+                }
+                return true;
+              },
+            },
+            questionHandlers,
+          );
+
+          program.dataSources.push({
+            type: "web",
+            config: {
+              baseUrl,
+              prefix: baseUrl,
+              depth: 1,
+            },
+          });
+          break;
+        }
+        case "db": {
+          const dbPrompts: prompts.PromptObject<string>[] = [
+            {
+              type: "text",
+              name: "uri",
+              message:
+                "Please enter the connection string (URI) for the database.",
+              initial: "mysql+pymysql://user:pass@localhost:3306/mydb",
+              validate: (value: string) => {
+                if (!value) {
+                  return "Please provide a valid connection string";
+                } else if (
+                  !(
+                    value.startsWith("mysql+pymysql://") ||
+                    value.startsWith("postgresql+psycopg://")
+                  )
+                ) {
+                  return "The connection string must start with 'mysql+pymysql://' for MySQL or 'postgresql+psycopg://' for PostgreSQL";
+                }
+                return true;
+              },
+            },
+            // Only ask for a single query; users can provide more complex queries in the config file later
+            {
+              type: (prev) => (prev ? "text" : null),
+              name: "queries",
+              message: "Please enter the SQL query to fetch data:",
+              initial: "SELECT * FROM mytable",
+            },
+          ];
+          program.dataSources.push({
+            type: "db",
+            config: await prompts(dbPrompts, questionHandlers),
+          });
+          break;
+        }
+      }
+    }
+  }
+
+  const isUsingLlamaCloud = program.vectorDb === "llamacloud";
+
+  // Asking for LlamaParse if user selected file data source
+  if (isUsingLlamaCloud) {
+    // default to use LlamaParse if using LlamaCloud
+    program.useLlamaParse = true;
+  } else {
+    // Extractor template doesn't support LlamaParse and LlamaCloud right now (cannot use asyncio loop in Reflex)
+    if (
+      program.useLlamaParse === undefined &&
+      program.template !== "extractor"
+    ) {
+      // If useLlamaParse is already set, don't ask again
+      if (program.dataSources.some((ds) => ds.type === "file")) {
+        const { useLlamaParse } = await prompts(
+          {
+            type: "toggle",
+            name: "useLlamaParse",
+            message:
+              "Would you like to use LlamaParse (improved parser for RAG - requires API key)?",
+            initial: false,
+            active: "Yes",
+            inactive: "No",
+          },
+          questionHandlers,
+        );
+        program.useLlamaParse = useLlamaParse;
+      }
+    }
+  }
+
+  // Ask for LlamaCloud API key when using a LlamaCloud index or LlamaParse
+  if (isUsingLlamaCloud || program.useLlamaParse) {
+    if (!program.llamaCloudKey) {
+      // Ask for the LlamaCloud API key; only reached when it wasn't
+      // already provided (see the guard above)
+      const { llamaCloudKey } = await prompts(
+        {
+          type: "text",
+          name: "llamaCloudKey",
+          message:
+            "Please provide your LlamaCloud API key (leave blank to skip):",
+        },
+        questionHandlers,
+      );
+      program.llamaCloudKey = llamaCloudKey || process.env.LLAMA_CLOUD_API_KEY;
+    }
+  }
+
+  if (
+    !program.tools &&
+    (program.template === "streaming" || program.template === "multiagent")
+  ) {
+    const options = supportedTools.filter((t) =>
+      t.supportedFrameworks?.includes(program.framework),
+    );
+    const toolChoices = options.map((tool) => ({
+      title: `${tool.display}${toolRequiresConfig(tool) ? " (needs configuration)" : ""}`,
+      value: tool.name,
+    }));
+    const { toolsName } = await prompts({
+      type: "multiselect",
+      name: "toolsName",
+      message:
+        "Would you like to build an agent using tools? If so, select the tools here, otherwise just press enter",
+      choices: toolChoices,
+    });
+    const tools = toolsName?.map((tool: string) =>
+      supportedTools.find((t) => t.name === tool),
+    );
+    program.tools = tools;
+  }
+
+  program.postInstallAction = await askPostInstallAction(program);
+};
diff --git a/questions/simple.ts b/questions/simple.ts
new file mode 100644
index 0000000000000000000000000000000000000000..486d2370b381a7735928f582100d1b7f0d6135af
--- /dev/null
+++ b/questions/simple.ts
@@ -0,0 +1,148 @@
+import prompts from "prompts";
+import { EXAMPLE_FILE } from "../helpers/datasources";
+import { askModelConfig } from "../helpers/providers";
+import { getTools } from "../helpers/tools";
+import { ModelConfig, TemplateFramework } from "../helpers/types";
+import { PureQuestionArgs, QuestionResults } from "./types";
+import { askPostInstallAction, questionHandlers } from "./utils";
+type AppType = "rag" | "code_artifact" | "multiagent" | "extractor";
+
+type SimpleAnswers = {
+  appType: AppType;
+  language: TemplateFramework;
+  useLlamaCloud: boolean;
+  llamaCloudKey?: string;
+  modelConfig: ModelConfig;
+};
+
+export const askSimpleQuestions = async (
+  args: PureQuestionArgs,
+): Promise<QuestionResults> => {
+  const { appType } = await prompts(
+    {
+      type: "select",
+      name: "appType",
+      message: "What app do you want to build?",
+      choices: [
+        { title: "Agentic RAG", value: "rag" },
+        { title: "Code Artifact Agent", value: "code_artifact" },
+        { title: "Multi-Agent Report Gen", value: "multiagent" },
+        { title: "Structured extraction", value: "extractor" },
+      ],
+    },
+    questionHandlers,
+  );
+
+  let language: TemplateFramework = "fastapi";
+  if (appType !== "extractor") {
+    const res = await prompts(
+      {
+        type: "select",
+        name: "language",
+        message: "What language do you want to use?",
+        choices: [
+          { title: "Python (FastAPI)", value: "fastapi" },
+          { title: "Typescript (NextJS)", value: "nextjs" },
+        ],
+      },
+      questionHandlers,
+    );
+    language = res.language;
+  }
+
+  const { useLlamaCloud } = await prompts(
+    {
+      type: "toggle",
+      name: "useLlamaCloud",
+      message: "Do you want to use LlamaCloud services?",
+      initial: false,
+      active: "Yes",
+      inactive: "No",
+      hint: "see https://www.llamaindex.ai/enterprise for more info",
+    },
+    questionHandlers,
+  );
+
+  let llamaCloudKey = args.llamaCloudKey;
+  if (useLlamaCloud && !llamaCloudKey) {
+    // Ask for LlamaCloud API key, if not set
+    const { llamaCloudKey: newLlamaCloudKey } = await prompts(
+      {
+        type: "text",
+        name: "llamaCloudKey",
+        message:
+          "Please provide your LlamaCloud API key (leave blank to skip):",
+      },
+      questionHandlers,
+    );
+    llamaCloudKey = newLlamaCloudKey || process.env.LLAMA_CLOUD_API_KEY;
+  }
+
+  const modelConfig = await askModelConfig({
+    openAiKey: args.openAiKey,
+    askModels: args.askModels ?? false,
+    framework: language,
+  });
+
+  const results = convertAnswers({
+    appType,
+    language,
+    useLlamaCloud,
+    llamaCloudKey,
+    modelConfig,
+  });
+
+  results.postInstallAction = await askPostInstallAction(results);
+  return results;
+};
+
+const convertAnswers = (answers: SimpleAnswers): QuestionResults => {
+  const lookup: Record<
+    AppType,
+    Pick<QuestionResults, "template" | "tools" | "frontend" | "dataSources">
+  > = {
+    rag: {
+      template: "streaming",
+      tools: getTools(["duckduckgo"]),
+      frontend: true,
+      dataSources: [EXAMPLE_FILE],
+    },
+    code_artifact: {
+      template: "streaming",
+      tools: getTools(["artifact"]),
+      frontend: true,
+      dataSources: [],
+    },
+    multiagent: {
+      template: "multiagent",
+      tools: getTools([
+        "document_generator",
+        "wikipedia.WikipediaToolSpec",
+        "duckduckgo",
+        "img_gen",
+      ]),
+      frontend: true,
+      dataSources: [EXAMPLE_FILE],
+    },
+    extractor: {
+      template: "extractor",
+      tools: [],
+      frontend: false,
+      dataSources: [EXAMPLE_FILE],
+    },
+  };
+  const results = lookup[answers.appType];
+  return {
+    framework: answers.language,
+    ui: "shadcn",
+    llamaCloudKey: answers.llamaCloudKey,
+    useLlamaParse: answers.useLlamaCloud,
+    llamapack: "",
+    postInstallAction: "none",
+    vectorDb: answers.useLlamaCloud ? "llamacloud" : "none",
+    modelConfig: answers.modelConfig,
+    observability: "none",
+    ...results,
+    frontend: answers.language === "nextjs" ? false : results.frontend,
+  };
+};
diff --git a/questions/stores.ts b/questions/stores.ts
new file mode 100644
index 0000000000000000000000000000000000000000..56367fb7a8d15ccb6c5bf51cfd44058fc3eb8609
--- /dev/null
+++ b/questions/stores.ts
@@ -0,0 +1,36 @@
+import fs from "fs";
+import path from "path";
+import { TemplateFramework } from "../helpers";
+import { templatesDir } from "../helpers/dir";
+
+export const getVectorDbChoices = (framework: TemplateFramework) => {
+  const choices = [
+    {
+      title: "No, just store the data in the file system",
+      value: "none",
+    },
+    { title: "MongoDB", value: "mongo" },
+    { title: "PostgreSQL", value: "pg" },
+    { title: "Pinecone", value: "pinecone" },
+    { title: "Milvus", value: "milvus" },
+    { title: "Astra", value: "astra" },
+    { title: "Qdrant", value: "qdrant" },
+    { title: "ChromaDB", value: "chroma" },
+    { title: "Weaviate", value: "weaviate" },
+    { title: "LlamaCloud (use Managed Index)", value: "llamacloud" },
+  ];
+
+  const vectordbLang = framework === "fastapi" ? "python" : "typescript";
+  const compPath = path.join(templatesDir, "components");
+  const vectordbPath = path.join(compPath, "vectordbs", vectordbLang);
+
+  const availableChoices = fs
+    .readdirSync(vectordbPath)
+    .filter((file) => fs.statSync(path.join(vectordbPath, file)).isDirectory());
+
+  const displayedChoices = choices.filter((choice) =>
+    availableChoices.includes(choice.value),
+  );
+
+  return displayedChoices;
+};
diff --git a/questions/types.ts b/questions/types.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1ea45c186521734439b55fefc751e5f8d0f6dd92
--- /dev/null
+++ b/questions/types.ts
@@ -0,0 +1,15 @@
+import { InstallAppArgs } from "../create-app";
+
+export type QuestionResults = Omit<
+  InstallAppArgs,
+  "appPath" | "packageManager" | "externalPort"
+>;
+
+export type PureQuestionArgs = {
+  askModels?: boolean;
+  pro?: boolean;
+  openAiKey?: string;
+  llamaCloudKey?: string;
+};
+
+export type QuestionArgs = QuestionResults & PureQuestionArgs;
diff --git a/questions/utils.ts b/questions/utils.ts
new file mode 100644
index 0000000000000000000000000000000000000000..710cdf5dbb42189637c5f943d096acd8494950bd
--- /dev/null
+++ b/questions/utils.ts
@@ -0,0 +1,178 @@
+import { execSync } from "child_process";
+import fs from "fs";
+import path from "path";
+import { red } from "picocolors";
+import prompts from "prompts";
+import { TemplateDataSourceType, TemplatePostInstallAction } from "../helpers";
+import { toolsRequireConfig } from "../helpers/tools";
+import { QuestionResults } from "./types";
+
+export const supportedContextFileTypes = [
+  ".pdf",
+  ".doc",
+  ".docx",
+  ".xls",
+  ".xlsx",
+  ".csv",
+];
+
+const MACOS_FILE_SELECTION_SCRIPT = `
+osascript -l JavaScript -e '
+  a = Application.currentApplication();
+  a.includeStandardAdditions = true;
+  a.chooseFile({ withPrompt: "Please select files to process:", multipleSelectionsAllowed: true }).map(file => file.toString())
+'`;
+
+const MACOS_FOLDER_SELECTION_SCRIPT = `
+osascript -l JavaScript -e '
+  a = Application.currentApplication();
+  a.includeStandardAdditions = true;
+  a.chooseFolder({ withPrompt: "Please select folders to process:", multipleSelectionsAllowed: true }).map(folder => folder.toString())
+'`;
+
+const WINDOWS_FILE_SELECTION_SCRIPT = `
+Add-Type -AssemblyName System.Windows.Forms
+$openFileDialog = New-Object System.Windows.Forms.OpenFileDialog
+$openFileDialog.InitialDirectory = [Environment]::GetFolderPath('Desktop')
+$openFileDialog.Multiselect = $true
+$result = $openFileDialog.ShowDialog()
+if ($result -eq 'OK') {
+  $openFileDialog.FileNames
+}
+`;
+
+const WINDOWS_FOLDER_SELECTION_SCRIPT = `
+Add-Type -AssemblyName System.Windows.Forms
+$folderBrowser = New-Object System.Windows.Forms.FolderBrowserDialog
+$dialogResult = $folderBrowser.ShowDialog()
+if ($dialogResult -eq [System.Windows.Forms.DialogResult]::OK)
+{
+    $folderBrowser.SelectedPath
+}
+`;
+
+export const selectLocalContextData = async (type: TemplateDataSourceType) => {
+  try {
+    let selectedPath: string = "";
+    let execScript: string;
+    let execOpts: any = {};
+    switch (process.platform) {
+      case "win32": // Windows
+        execScript =
+          type === "file"
+            ? WINDOWS_FILE_SELECTION_SCRIPT
+            : WINDOWS_FOLDER_SELECTION_SCRIPT;
+        execOpts = { shell: "powershell.exe" };
+        break;
+      case "darwin": // MacOS
+        execScript =
+          type === "file"
+            ? MACOS_FILE_SELECTION_SCRIPT
+            : MACOS_FOLDER_SELECTION_SCRIPT;
+        break;
+      default: // Unsupported OS
+        console.log(red("Unsupported OS error!"));
+        process.exit(1);
+    }
+    selectedPath = execSync(execScript, execOpts).toString().trim();
+    const paths =
+      process.platform === "win32"
+        ? selectedPath.split("\r\n")
+        : selectedPath.split(", ");
+
+    for (const p of paths) {
+      if (
+        fs.statSync(p).isFile() &&
+        !supportedContextFileTypes.includes(path.extname(p))
+      ) {
+        console.log(
+          red(
+            `Please select a supported file type: ${supportedContextFileTypes}`,
+          ),
+        );
+        process.exit(1);
+      }
+    }
+    return paths;
+  } catch (error) {
+    console.log(
+      red(
+        "Got an error when trying to select local context data! Please try again or select another data source option.",
+      ),
+    );
+    process.exit(1);
+  }
+};
+
+export const onPromptState = (state: any) => {
+  if (state.aborted) {
+    // If we don't re-enable the terminal cursor before exiting
+    // the program, the cursor will remain hidden
+    process.stdout.write("\x1B[?25h");
+    process.stdout.write("\n");
+    process.exit(1);
+  }
+};
+
+export const toChoice = (value: string) => {
+  return { title: value, value };
+};
+
+export const questionHandlers = {
+  onCancel: () => {
+    console.error("Exiting.");
+    process.exit(1);
+  },
+};
+
+// Ask for next action after installation
+export async function askPostInstallAction(
+  args: QuestionResults,
+): Promise<TemplatePostInstallAction> {
+  const actionChoices = [
+    {
+      title: "Just generate code (~1 sec)",
+      value: "none",
+    },
+    {
+      title: "Start in VSCode (~1 sec)",
+      value: "VSCode",
+    },
+    {
+      title: "Generate code and install dependencies (~2 min)",
+      value: "dependencies",
+    },
+  ];
+
+  const modelConfigured = !args.llamapack && args.modelConfig.isConfigured();
+  // If using LlamaParse, require LlamaCloud API key
+  const llamaCloudKeyConfigured = args.useLlamaParse
+    ? args.llamaCloudKey || process.env["LLAMA_CLOUD_API_KEY"]
+    : true;
+  const hasVectorDb = args.vectorDb && args.vectorDb !== "none";
+  // Can run the app if all tools do not require configuration
+  if (
+    !hasVectorDb &&
+    modelConfigured &&
+    llamaCloudKeyConfigured &&
+    !toolsRequireConfig(args.tools)
+  ) {
+    actionChoices.push({
+      title: "Generate code, install dependencies, and run the app (~2 min)",
+      value: "runApp",
+    });
+  }
+
+  const { action } = await prompts(
+    {
+      type: "select",
+      name: "action",
+      message: "How would you like to proceed?",
+      choices: actionChoices,
+      initial: 1,
+    },
+    questionHandlers,
+  );
+
+  return action;
+}
diff --git a/tsconfig.json b/tsconfig.json
index e85fc126ec4abb16fe99642ec8c2f62df8e1b5ff..7e95b5bec41bfba9ce7a708903ea972bf75c7076 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -18,7 +18,7 @@
     "create-app.ts",
     "index.ts",
     "./helpers",
-    "questions.ts",
+    "./questions",
     "package.json",
     "types/**/*"
   ],