From 81b67794effe615345601f2bdfed76258eb98980 Mon Sep 17 00:00:00 2001
From: Marcus Schiesser <mail@marcusschiesser.de>
Date: Mon, 29 Jul 2024 14:44:35 +0200
Subject: [PATCH] fix: throw errors if azure deployments are not set

---
 .changeset/smooth-points-float.md                | 2 +-
 templates/components/settings/python/settings.py | 8 ++++----
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/.changeset/smooth-points-float.md b/.changeset/smooth-points-float.md
index f0882f52..78aa703a 100644
--- a/.changeset/smooth-points-float.md
+++ b/.changeset/smooth-points-float.md
@@ -2,4 +2,4 @@
 "create-llama": patch
 ---
 
-Add azure model provider
+Add Azure OpenAI as model provider
diff --git a/templates/components/settings/python/settings.py b/templates/components/settings/python/settings.py
index 4d50429c..b723bf3e 100644
--- a/templates/components/settings/python/settings.py
+++ b/templates/components/settings/python/settings.py
@@ -75,15 +75,15 @@ def init_azure_openai():
     from llama_index.embeddings.azure_openai import AzureOpenAIEmbedding
     from llama_index.llms.azure_openai import AzureOpenAI
 
-    llm_deployment = os.getenv("AZURE_OPENAI_LLM_DEPLOYMENT")
-    embedding_deployment = os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT")
+    llm_deployment = os.environ["AZURE_OPENAI_LLM_DEPLOYMENT"]
+    embedding_deployment = os.environ["AZURE_OPENAI_EMBEDDING_DEPLOYMENT"]
     max_tokens = os.getenv("LLM_MAX_TOKENS")
     temperature = os.getenv("LLM_TEMPERATURE", DEFAULT_TEMPERATURE)
     dimensions = os.getenv("EMBEDDING_DIM")
 
     azure_config = {
-        "api_key": os.getenv("AZURE_OPENAI_KEY"),
-        "azure_endpoint": os.getenv("AZURE_OPENAI_ENDPOINT"),
+        "api_key": os.environ["AZURE_OPENAI_KEY"],
+        "azure_endpoint": os.environ["AZURE_OPENAI_ENDPOINT"],
         "api_version": os.getenv("AZURE_OPENAI_API_VERSION")
         or os.getenv("OPENAI_API_VERSION"),
     }
--
GitLab
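
Note (not part of the patch): the fix swaps os.getenv() for direct os.environ[...] lookups on the Azure settings that have no usable default, so a missing deployment name, key, or endpoint fails at startup instead of producing a half-configured client. A minimal sketch of that behavioural difference, reusing one of the variable names from settings.py:

    import os

    # os.getenv() returns None for a missing variable; the error only surfaces
    # later, when the Azure client is built with an invalid deployment name.
    deployment = os.getenv("AZURE_OPENAI_LLM_DEPLOYMENT")
    print(deployment)  # prints "None" if the variable is unset

    # os.environ[...] fails fast: a missing variable raises KeyError immediately,
    # naming the variable that needs to be configured.
    try:
        deployment = os.environ["AZURE_OPENAI_LLM_DEPLOYMENT"]
    except KeyError as missing:
        print(f"Missing required environment variable: {missing}")

Optional settings such as LLM_MAX_TOKENS and LLM_TEMPERATURE keep using os.getenv(), since None or a default value is acceptable for them.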