diff --git a/e2e/utils.ts b/e2e/utils.ts
index a835f0f20f8699791b07f186f5f8229ed1fb2da1..799daf49ff05688f8ad23d371811ccd8860a2717 100644
--- a/e2e/utils.ts
+++ b/e2e/utils.ts
@@ -90,8 +90,6 @@ export async function runCreateLlama({
     ...dataSourceArgs,
     "--vector-db",
     vectorDb,
-    "--open-ai-key",
-    process.env.OPENAI_API_KEY,
     "--use-pnpm",
     "--port",
     port,
@@ -103,8 +101,6 @@ export async function runCreateLlama({
     tools ?? "none",
     "--observability",
     "none",
-    "--llama-cloud-key",
-    process.env.LLAMA_CLOUD_API_KEY,
   ];
 
   if (templateUI) {
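
The two key flags are dropped because, with the `isCI` guard introduced below, the CLI no longer prompts for keys in CI and instead resolves them from the environment the test process already inherits. A minimal sketch of that flow, assuming the runner spawns the CLI via Node's child_process (the real runCreateLlama helper may wire this up differently, and `commandArgs` here is abbreviated):

    // sketch.ts: hypothetical runner, for illustration only.
    import { spawn } from "node:child_process";

    const commandArgs = ["--vector-db", "none", "--use-pnpm"]; // abbreviated

    // The keys are no longer CLI flags; the child inherits them through `env`,
    // and the CI question flow reads OPENAI_API_KEY / LLAMA_CLOUD_API_KEY
    // from the environment instead of prompting.
    const child = spawn("npx", ["create-llama", ...commandArgs], {
      env: { ...process.env },
      stdio: "inherit",
    });
    child.on("exit", (code) => process.exit(code ?? 1));
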
diff --git a/helpers/providers/openai.ts b/helpers/providers/openai.ts
index 74f9a42bc85ad910172d0fc83a9f2bdc27d2d686..c26ff4c4f7fff16ef9c2cbccf0d8cf21ac5f0cd9 100644
--- a/helpers/providers/openai.ts
+++ b/helpers/providers/openai.ts
@@ -3,6 +3,7 @@ import ora from "ora";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
+import { isCI } from "../../questions";
 import { questionHandlers } from "../../questions/utils";
 
 const OPENAI_API_URL = "https://api.openai.com/v1";
@@ -30,7 +31,7 @@ export async function askOpenAIQuestions({
     },
   };
 
-  if (!config.apiKey) {
+  if (!config.apiKey && !isCI) {
     const { key } = await prompts(
       {
         type: "text",
diff --git a/questions/index.ts b/questions/index.ts
index 7de3c9f658022e1424f552d6449d64a0adc2feb6..725aeba9dec0ff51bb8dec54a03c3169ddc60b4f 100644
--- a/questions/index.ts
+++ b/questions/index.ts
@@ -4,10 +4,12 @@ import { askProQuestions } from "./questions";
 import { askSimpleQuestions } from "./simple";
 import { QuestionArgs, QuestionResults } from "./types";
 
+export const isCI = ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1";
+
 export const askQuestions = async (
   args: QuestionArgs,
 ): Promise<QuestionResults> => {
-  if (ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1") {
+  if (isCI) {
     return await getCIQuestionResults(args);
   } else if (args.pro) {
     // TODO: refactor pro questions to return a result object
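
Two properties of this constant are worth noting. It is evaluated once at module load, so `PLAYWRIGHT_TEST=1` has to be in the environment before the CLI's modules are imported, which is presumably why the e2e suite sets it on the spawned process rather than mid-run. And although `questions/questions.ts` importing `isCI` from this index file (which itself imports `askProQuestions` from `./questions`) forms a cycle, it resolves cleanly because `isCI` is only read at call time, after both modules have initialized. A short illustrative check of the load-time behavior:

    // Illustrative only: isCI is a top-level const, frozen when this module
    // first loads, so the flag must be set before the import happens.
    process.env.PLAYWRIGHT_TEST = "1";
    const { isCI } = await import("./questions"); // dynamic import, env already set
    console.assert(isCI === true);
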
diff --git a/questions/questions.ts b/questions/questions.ts
index 45c262f49b6bbb10df690f2affa0404e31cbaff7..c152362f8350b35c0fa10260c605d5e483489055 100644
--- a/questions/questions.ts
+++ b/questions/questions.ts
@@ -1,5 +1,6 @@
 import { blue, green } from "picocolors";
 import prompts from "prompts";
+import { isCI } from ".";
 import { COMMUNITY_OWNER, COMMUNITY_REPO } from "../helpers/constant";
 import { EXAMPLE_FILE } from "../helpers/datasources";
 import { getAvailableLlamapackOptions } from "../helpers/llama-pack";
@@ -386,7 +387,7 @@ export const askProQuestions = async (program: QuestionArgs) => {
 
   // Ask for LlamaCloud API key when using a LlamaCloud index or LlamaParse
   if (isUsingLlamaCloud || program.useLlamaParse) {
-    if (!program.llamaCloudKey) {
+    if (!program.llamaCloudKey && !isCI) {
       // if already set, don't ask again
       // Ask for LlamaCloud API key
       const { llamaCloudKey } = await prompts(
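
Same pattern as the OpenAI change: skip the interactive prompt under CI and let the key come from `LLAMA_CLOUD_API_KEY` in the environment, which is what lets the e2e runner above drop the `--llama-cloud-key` flag. A hypothetical condensation of the guard (`promptForLlamaCloudKey` is a stand-in for the inline prompts() call and does not exist in this repo):

    import { isCI } from ".";

    // Hypothetical condensation of the guard above, for illustration.
    declare const program: { llamaCloudKey?: string };
    declare function promptForLlamaCloudKey(): Promise<string | undefined>;

    const llamaCloudKey =
      program.llamaCloudKey ??
      (isCI ? process.env.LLAMA_CLOUD_API_KEY : await promptForLlamaCloudKey());
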
diff --git a/templates/types/streaming/fastapi/app/services/file.py b/templates/types/streaming/fastapi/app/services/file.py
index b9de026e2c3ea6e75c88af70c911539eec722981..7aa6696c3a484506ab83acd8f88b4c55343c1377 100644
--- a/templates/types/streaming/fastapi/app/services/file.py
+++ b/templates/types/streaming/fastapi/app/services/file.py
@@ -242,13 +242,11 @@ class FileService:
         except ImportError as e:
             raise ValueError("LlamaCloudFileService is not found") from e
 
-        project_id = index._get_project_id()
-        pipeline_id = index._get_pipeline_id()
         # LlamaCloudIndex is a managed index so we can directly use the files
         upload_file = (file_name, BytesIO(file_data))
         doc_id = LLamaCloudFileService.add_file_to_pipeline(
-            project_id,
-            pipeline_id,
+            index.project.id,
+            index.pipeline.id,
             upload_file,
             custom_metadata={},
         )
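
Here the private `_get_project_id()` / `_get_pipeline_id()` helpers give way to the index's public `project` and `pipeline` attributes, whose `.id` fields carry the same values at the same call sites. Reaching into underscore-prefixed methods is brittle across llama-index upgrades; the public attributes are the supported surface. If a deployment might still run an older llama-index, a guarded fallback is one option; this shim is an assumption for illustration, not part of the patch:

    # Hypothetical compatibility shim, for illustration only; not part of the
    # patch. Falls back to the old private helpers when the installed
    # llama-index is too old to expose the public attributes.
    def get_cloud_ids(index):
        project = getattr(index, "project", None)
        pipeline = getattr(index, "pipeline", None)
        if project is not None and pipeline is not None:
            return project.id, pipeline.id  # public surface used by this patch
        return index._get_project_id(), index._get_pipeline_id()  # pre-upgrade API
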