From 7f14e47f5654107b4ba14a6421f3fe56b3f318c1 Mon Sep 17 00:00:00 2001
From: Huu Le <39040748+leehuwuj@users.noreply.github.com>
Date: Mon, 18 Nov 2024 16:41:45 +0700
Subject: [PATCH] feat: Improve CI (#431)

---
 e2e/utils.ts                                            | 4 ----
 helpers/providers/openai.ts                             | 3 ++-
 questions/index.ts                                      | 4 +++-
 questions/questions.ts                                  | 3 ++-
 templates/types/streaming/fastapi/app/services/file.py  | 6 ++----
 5 files changed, 9 insertions(+), 11 deletions(-)

diff --git a/e2e/utils.ts b/e2e/utils.ts
index a835f0f2..799daf49 100644
--- a/e2e/utils.ts
+++ b/e2e/utils.ts
@@ -90,8 +90,6 @@ export async function runCreateLlama({
     ...dataSourceArgs,
     "--vector-db",
     vectorDb,
-    "--open-ai-key",
-    process.env.OPENAI_API_KEY,
     "--use-pnpm",
     "--port",
     port,
@@ -103,8 +101,6 @@ export async function runCreateLlama({
     tools ?? "none",
     "--observability",
     "none",
-    "--llama-cloud-key",
-    process.env.LLAMA_CLOUD_API_KEY,
   ];
 
   if (templateUI) {
diff --git a/helpers/providers/openai.ts b/helpers/providers/openai.ts
index 74f9a42b..c26ff4c4 100644
--- a/helpers/providers/openai.ts
+++ b/helpers/providers/openai.ts
@@ -3,6 +3,7 @@ import ora from "ora";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
+import { isCI } from "../../questions";
 import { questionHandlers } from "../../questions/utils";
 
 const OPENAI_API_URL = "https://api.openai.com/v1";
@@ -30,7 +31,7 @@ export async function askOpenAIQuestions({
     },
   };
 
-  if (!config.apiKey) {
+  if (!config.apiKey && !isCI) {
     const { key } = await prompts(
       {
         type: "text",
diff --git a/questions/index.ts b/questions/index.ts
index 7de3c9f6..725aeba9 100644
--- a/questions/index.ts
+++ b/questions/index.ts
@@ -4,10 +4,12 @@ import { askProQuestions } from "./questions";
 import { askSimpleQuestions } from "./simple";
 import { QuestionArgs, QuestionResults } from "./types";
 
+export const isCI = ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1";
+
 export const askQuestions = async (
   args: QuestionArgs,
 ): Promise<QuestionResults> => {
-  if (ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1") {
+  if (isCI) {
     return await getCIQuestionResults(args);
   } else if (args.pro) {
     // TODO: refactor pro questions to return a result object
diff --git a/questions/questions.ts b/questions/questions.ts
index 45c262f4..c152362f 100644
--- a/questions/questions.ts
+++ b/questions/questions.ts
@@ -1,5 +1,6 @@
 import { blue, green } from "picocolors";
 import prompts from "prompts";
+import { isCI } from ".";
 import { COMMUNITY_OWNER, COMMUNITY_REPO } from "../helpers/constant";
 import { EXAMPLE_FILE } from "../helpers/datasources";
 import { getAvailableLlamapackOptions } from "../helpers/llama-pack";
@@ -386,7 +387,7 @@ export const askProQuestions = async (program: QuestionArgs) => {
 
   // Ask for LlamaCloud API key when using a LlamaCloud index or LlamaParse
   if (isUsingLlamaCloud || program.useLlamaParse) {
-    if (!program.llamaCloudKey) {
+    if (!program.llamaCloudKey && !isCI) {
      // if already set, don't ask again
      // Ask for LlamaCloud API key
      const { llamaCloudKey } = await prompts(
diff --git a/templates/types/streaming/fastapi/app/services/file.py b/templates/types/streaming/fastapi/app/services/file.py
index b9de026e..7aa6696c 100644
--- a/templates/types/streaming/fastapi/app/services/file.py
+++ b/templates/types/streaming/fastapi/app/services/file.py
@@ -242,13 +242,11 @@ class FileService:
         except ImportError as e:
             raise ValueError("LlamaCloudFileService is not found") from e
 
-        project_id = index._get_project_id()
-        pipeline_id = index._get_pipeline_id()
         # LlamaCloudIndex is a managed index so we can directly use the files
         upload_file = (file_name, BytesIO(file_data))
         doc_id = LLamaCloudFileService.add_file_to_pipeline(
-            project_id,
-            pipeline_id,
+            index.project.id,
+            index.pipeline.id,
             upload_file,
             custom_metadata={},
         )
-- 
GitLab