From a25e7b716a5c0a1ae2e07637be05680f5a604052 Mon Sep 17 00:00:00 2001
From: Massimiliano Pippi <mpippi@gmail.com>
Date: Thu, 27 Feb 2025 04:44:20 +0100
Subject: [PATCH] fix: tools param cannot be Null when calling Anthropic
 Messages API (#17928)

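`_prepare_chat_with_tools` returned `"tools": tool_dicts or None`, so an
empty tool list was forwarded to the Anthropic Messages API as a null
`tools` param, which the API rejects. Return the (possibly empty) list
itself instead.

A minimal sketch of the behaviour change (hypothetical usage, mirroring
the added test; assumes an Anthropic API key is available so the client
can be constructed):

    from llama_index.llms.anthropic import Anthropic

    llm = Anthropic()
    params = llm._prepare_chat_with_tools(tools=[])
    # before this patch: params["tools"] is None -> rejected by the API
    # after this patch:  params["tools"] == []
    assert params["tools"] == []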
---
 .../llama_index/llms/anthropic/base.py        | 43 ++++++++++---------
 .../llama-index-llms-anthropic/pyproject.toml |  2 +-
 .../tests/test_llms_anthropic.py              | 15 +++++--
 3 files changed, 34 insertions(+), 26 deletions(-)

diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py b/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py
index afdd294509..81a8c6ac16 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py
@@ -1,15 +1,6 @@
-import anthropic
 import json
-from anthropic.types import (
-    ContentBlockDeltaEvent,
-    TextBlock,
-    TextDelta,
-    ContentBlockStartEvent,
-    ContentBlockStopEvent,
-)
-from anthropic.types.tool_use_block import ToolUseBlock
-
 from typing import (
+    TYPE_CHECKING,
     Any,
     Callable,
     Dict,
@@ -18,9 +9,14 @@ from typing import (
     Sequence,
     Tuple,
     Union,
-    TYPE_CHECKING,
 )
 
+from llama_index.core.base.llms.generic_utils import (
+    achat_to_completion_decorator,
+    astream_chat_to_completion_decorator,
+    chat_to_completion_decorator,
+    stream_chat_to_completion_decorator,
+)
 from llama_index.core.base.llms.types import (
     ChatMessage,
     ChatResponse,
@@ -39,22 +35,26 @@ from llama_index.core.llms.callbacks import (
     llm_chat_callback,
     llm_completion_callback,
 )
-from llama_index.core.base.llms.generic_utils import (
-    achat_to_completion_decorator,
-    astream_chat_to_completion_decorator,
-    chat_to_completion_decorator,
-    stream_chat_to_completion_decorator,
-)
 from llama_index.core.llms.function_calling import FunctionCallingLLM, ToolSelection
+from llama_index.core.llms.utils import parse_partial_json
 from llama_index.core.types import BaseOutputParser, PydanticProgramMode
+from llama_index.core.utils import Tokenizer
 from llama_index.llms.anthropic.utils import (
     anthropic_modelname_to_contextsize,
     force_single_tool_call,
     is_function_calling_model,
     messages_to_anthropic_messages,
 )
-from llama_index.core.utils import Tokenizer
-from llama_index.core.llms.utils import parse_partial_json
+
+import anthropic
+from anthropic.types import (
+    ContentBlockDeltaEvent,
+    ContentBlockStartEvent,
+    ContentBlockStopEvent,
+    TextBlock,
+    TextDelta,
+)
+from anthropic.types.tool_use_block import ToolUseBlock
 
 if TYPE_CHECKING:
     from llama_index.core.tools.types import BaseTool
@@ -78,7 +78,8 @@ class AnthropicTokenizer:
 
 
 class Anthropic(FunctionCallingLLM):
-    """Anthropic LLM.
+    """
+    Anthropic LLM.
 
     Examples:
         `pip install llama-index-llms-anthropic`
@@ -474,7 +475,7 @@ class Anthropic(FunctionCallingLLM):
                     "input_schema": tool.metadata.get_parameters_dict(),
                 }
             )
-        return {"messages": chat_history, "tools": tool_dicts or None, **kwargs}
+        return {"messages": chat_history, "tools": tool_dicts, **kwargs}
 
     def _validate_chat_with_tools_response(
         self,
diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml
index a8fa1c0a39..51bc9a6d93 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml
@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-llms-anthropic"
 readme = "README.md"
-version = "0.6.6"
+version = "0.6.7"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/tests/test_llms_anthropic.py b/llama-index-integrations/llms/llama-index-llms-anthropic/tests/test_llms_anthropic.py
index 834ea24cac..95bf01f812 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/tests/test_llms_anthropic.py
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/tests/test_llms_anthropic.py
@@ -1,10 +1,11 @@
-from llama_index.core.base.llms.base import BaseLLM
-from llama_index.llms.anthropic import Anthropic
-from llama_index.core.llms import ChatMessage
 import os
-import pytest
 from unittest.mock import MagicMock
 
+import pytest
+from llama_index.core.base.llms.base import BaseLLM
+from llama_index.core.llms import ChatMessage
+from llama_index.llms.anthropic import Anthropic
+
 
 def test_text_inference_embedding_class():
     names_of_base_classes = [b.__name__ for b in Anthropic.__mro__]
@@ -203,3 +204,9 @@ def test_anthropic_tokenizer():
         messages=[{"role": "user", "content": test_text}],
         model="claude-3-5-sonnet-20241022",
     )
+
+
+def test__prepare_chat_with_tools_empty():
+    llm = Anthropic()
+    retval = llm._prepare_chat_with_tools(tools=[])
+    assert retval["tools"] == []
-- 
GitLab