diff --git a/.changeset/bright-snails-drive.md b/.changeset/bright-snails-drive.md
new file mode 100644
index 0000000000000000000000000000000000000000..66a9a0b0129f7d5a11a3860d0952a15b4791ff5c
--- /dev/null
+++ b/.changeset/bright-snails-drive.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+Configure LlamaCloud organization ID for Python
diff --git a/helpers/env-variables.ts b/helpers/env-variables.ts
index ef232834e96dacc78ff1e9f0188ea0f6e8394bbb..bb04a75392aa52cf4cd39dd5ccdf0477a26f0f7c 100644
--- a/helpers/env-variables.ts
+++ b/helpers/env-variables.ts
@@ -155,6 +155,11 @@ const getVectorDBEnvs = (
             "The base URL for the LlamaCloud API. Only change this for non-production environments",
           value: "https://api.cloud.llamaindex.ai",
         },
+        {
+          name: "LLAMA_CLOUD_ORGANIZATION_ID",
+          description:
+            "The organization ID for the LlamaCloud project (uses default organization if not specified - Python only)",
+        },
       ];
     case "chroma":
       const envs = [
diff --git a/helpers/python.ts b/helpers/python.ts
index 3092bdbd2e309881008a2bd8b97139ef91dc88a0..095c1a0a2aa1f240726227f8fafffb9ccbc25243 100644
--- a/helpers/python.ts
+++ b/helpers/python.ts
@@ -121,7 +121,7 @@ const getAdditionalDependencies = (
     case "llamacloud":
       dependencies.push({
         name: "llama-index-indices-managed-llama-cloud",
-        version: "^0.2.5",
+        version: "^0.2.7",
       });
       break;
   }
diff --git a/templates/components/vectordbs/python/llamacloud/generate.py b/templates/components/vectordbs/python/llamacloud/generate.py
index f494941aeb16d8b6126d969c12268ec0c1e449bb..8bcf60689e0ce85214106bbeda1ba3fe4692174e 100644
--- a/templates/components/vectordbs/python/llamacloud/generate.py
+++ b/templates/components/vectordbs/python/llamacloud/generate.py
@@ -21,6 +21,7 @@ def generate_datasource():
     project_name = os.getenv("LLAMA_CLOUD_PROJECT_NAME")
     api_key = os.getenv("LLAMA_CLOUD_API_KEY")
     base_url = os.getenv("LLAMA_CLOUD_BASE_URL")
+    organization_id = os.getenv("LLAMA_CLOUD_ORGANIZATION_ID")
 
     if name is None or project_name is None or api_key is None:
         raise ValueError(
@@ -40,6 +41,7 @@ def generate_datasource():
         project_name=project_name,
         api_key=api_key,
         base_url=base_url,
+        organization_id=organization_id,
     )
 
     logger.info("Finished generating the index")
diff --git a/templates/components/vectordbs/python/llamacloud/index.py b/templates/components/vectordbs/python/llamacloud/index.py
index 160a20c69c500dfc66e32fe3d81b565a90d819ab..da73434f25b83b4531fedcdeb208e38a0c1d048e 100644
--- a/templates/components/vectordbs/python/llamacloud/index.py
+++ b/templates/components/vectordbs/python/llamacloud/index.py
@@ -11,6 +11,7 @@ def get_index():
     project_name = os.getenv("LLAMA_CLOUD_PROJECT_NAME")
     api_key = os.getenv("LLAMA_CLOUD_API_KEY")
     base_url = os.getenv("LLAMA_CLOUD_BASE_URL")
+    organization_id = os.getenv("LLAMA_CLOUD_ORGANIZATION_ID")
 
     if name is None or project_name is None or api_key is None:
         raise ValueError(
@@ -23,6 +24,7 @@ def get_index():
         project_name=project_name,
         api_key=api_key,
         base_url=base_url,
+        organization_id=organization_id,
     )
 
     return index
diff --git a/templates/types/streaming/fastapi/pyproject.toml b/templates/types/streaming/fastapi/pyproject.toml
index c669162e06bd743c53770f287c8617bad6989540..e897faffccb5f931390245f6c626bd9b10177a0a 100644
--- a/templates/types/streaming/fastapi/pyproject.toml
+++ b/templates/types/streaming/fastapi/pyproject.toml
@@ -14,7 +14,7 @@ fastapi = "^0.109.1"
 uvicorn = { extras = ["standard"], version = "^0.23.2" }
 python-dotenv = "^1.0.0"
 aiostream = "^0.5.2"
-llama-index = "0.10.55"
+llama-index = "0.10.58"
 cachetools = "^5.3.3"
 
 [build-system]