diff --git a/docs/docs/examples/llm/anthropic.ipynb b/docs/docs/examples/llm/anthropic.ipynb
index 8479462e702466366b75274901c364f70a72d732..57b4374727e7ee4e83f2a428ff0f3c78c891a4d8 100644
--- a/docs/docs/examples/llm/anthropic.ipynb
+++ b/docs/docs/examples/llm/anthropic.ipynb
@@ -59,7 +59,7 @@
     "\n",
     "First we want to set the tokenizer, which is slightly different than TikToken.\n",
     "\n",
-    "**NOTE**: The Claude 3 tokenizer has not been updated yet; using the existing Anthropic tokenizer leads to context overflow errors for 200k tokens. We've temporarily set the max tokens for Claude 3 to 180k."
+    "**NOTE**: The Anthropic client no longer bundles a local tokenizer; token counting now goes through a dedicated API endpoint that requires a model name. Older models such as claude-2.1 are not supported by that endpoint, so they can no longer be token-counted with recent versions of the Anthropic Python client."
    ]
   },
   {
diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py b/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py
index f0873d0d1a20198a1c3530651e2d33e47dd4392b..d967aa2c4d6fff351bb645cc6fd72c0029b34de5 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py
@@ -210,7 +210,17 @@ class Anthropic(FunctionCallingLLM):
 
     @property
     def tokenizer(self) -> Tokenizer:
-        return self._client.get_tokenizer()
+        # The Anthropic SDK removed the local tokenizer; token counts now
+        # come from the (beta) count_tokens endpoint, which needs a model
+        # name and performs a network request per call.
+        # NOTE(review): this returns a count-callable rather than an
+        # encode-style Tokenizer — confirm no caller relies on `.encode`.
+        def _count_tokens(text: str) -> int:
+            return self._client.beta.messages.count_tokens(
+                messages=[{"role": "user", "content": text}],
+                model=self.model,
+            ).input_tokens
+
+        return _count_tokens
 
     @property
     def _model_kwargs(self) -> Dict[str, Any]:
diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml
index 1ca5abc348c9844c84dbafdd2d6908b127b6c9c0..6e87ed717b0fb0058dc056c93b00d4e70afd0f7b 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml
@@ -27,11 +27,11 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-llms-anthropic"
 readme = "README.md"
-version = "0.3.9"
+version = "0.4.0"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
-anthropic = {extras = ["bedrock", "vertex"], version = ">=0.34.2"}
+anthropic = {extras = ["bedrock", "vertex"], version = ">=0.39.0"}
 llama-index-core = "^0.11.0"
 
 [tool.poetry.group.dev.dependencies]