From f17b066ffa644169a46bb47c4369e653c8689fc1 Mon Sep 17 00:00:00 2001
From: Thorsten Klein <tk@thklein.io>
Date: Fri, 15 Mar 2024 16:32:12 +0100
Subject: [PATCH] fix (llms/openai): don't try to get logprobs from None
 content (#11968)

---
 .../llama-index-llms-openai/llama_index/llms/openai/base.py    | 2 +-
 .../llms/llama-index-llms-openai/pyproject.toml                | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/base.py b/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/base.py
index a23d01509..ae0bcae46 100644
--- a/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-openai/llama_index/llms/openai/base.py
@@ -329,7 +329,7 @@ class OpenAI(LLM):
         message = from_openai_message(openai_message)
         openai_token_logprobs = response.choices[0].logprobs
         logprobs = None
-        if openai_token_logprobs:
+        if openai_token_logprobs and openai_token_logprobs.content:
             logprobs = from_openai_token_logprobs(openai_token_logprobs.content)
 
         return ChatResponse(
diff --git a/llama-index-integrations/llms/llama-index-llms-openai/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-openai/pyproject.toml
index 51577e0b7..010b96ae1 100644
--- a/llama-index-integrations/llms/llama-index-llms-openai/pyproject.toml
+++ b/llama-index-integrations/llms/llama-index-llms-openai/pyproject.toml
@@ -29,7 +29,7 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-llms-openai"
 readme = "README.md"
-version = "0.1.10"
+version = "0.1.11"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
-- 
GitLab
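
To illustrate the guard introduced above: the logprobs object returned with a chat choice can be present while its content field is None, and the old code then passed that None to from_openai_token_logprobs. The following minimal sketch shows the patched condition in isolation; TokenLogprob, ChoiceLogprobs, and extract_logprobs are hypothetical stand-in names, not the real OpenAI or LlamaIndex types.

from dataclasses import dataclass
from typing import List, Optional, Tuple


@dataclass
class TokenLogprob:
    # Stand-in for a single token logprob entry.
    token: str
    logprob: float


@dataclass
class ChoiceLogprobs:
    # Stand-in for the per-choice logprobs object; its content field
    # can be None even when the object itself is present.
    content: Optional[List[TokenLogprob]] = None


def extract_logprobs(
    openai_token_logprobs: Optional[ChoiceLogprobs],
) -> Optional[List[Tuple[str, float]]]:
    # Mirrors the patched condition: check the object AND its content,
    # so a present-but-empty logprobs payload is treated the same as
    # no logprobs at all instead of raising.
    if openai_token_logprobs and openai_token_logprobs.content:
        return [(lp.token, lp.logprob) for lp in openai_token_logprobs.content]
    return None


# The three cases the guard has to handle:
assert extract_logprobs(None) is None
assert extract_logprobs(ChoiceLogprobs(content=None)) is None
assert extract_logprobs(
    ChoiceLogprobs(content=[TokenLogprob(token="hi", logprob=-0.1)])
) == [("hi", -0.1)]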