Skip to content
Snippets Groups Projects
Unverified Commit defdfd8f authored by Souyama's avatar Souyama Committed by GitHub
Browse files

Fix: Anthropic LLM merge consecutive messages with same role (#12013)

parent 732e443b
No related branches found
No related tags found
No related merge requests found
from typing import Dict, Sequence, Tuple

from llama_index.core.base.llms.types import ChatMessage, MessageRole

from anthropic.types import MessageParam, TextBlockParam

HUMAN_PREFIX = "\n\nHuman:"
ASSISTANT_PREFIX = "\n\nAssistant:"
...@@ -27,18 +29,49 @@ def anthropic_modelname_to_contextsize(modelname: str) -> int: ...@@ -27,18 +29,49 @@ def anthropic_modelname_to_contextsize(modelname: str) -> int:
return CLAUDE_MODELS[modelname] return CLAUDE_MODELS[modelname]
def __merge_common_role_msgs(
    messages: "Sequence[MessageParam]",
) -> "Sequence[MessageParam]":
    """Merge consecutive messages that share the same role.

    Anthropic's messages API rejects consecutive messages with the same
    role, so each run of same-role messages is collapsed into a single
    message whose content blocks are concatenated in order.

    Args:
        messages: Anthropic-format message dicts in conversation order.
            ``content`` may be a string or a list of content blocks;
            both support ``+`` concatenation.

    Returns:
        A new list in which no two adjacent messages share a role. The
        input sequence and its message dicts are left unmodified.
    """
    merged: list = []
    for message in messages:
        if merged and merged[-1]["role"] == message["role"]:
            # Replace the tail with a shallow copy before concatenating so
            # the caller's original message dict is never mutated in place.
            tail = merged[-1]
            merged[-1] = {
                **tail,
                "content": tail["content"] + message["content"],
            }
        else:
            merged.append(message)
    return merged
def messages_to_anthropic_messages(
    messages: Sequence[ChatMessage],
) -> Tuple[Sequence[MessageParam], str]:
    """Converts a list of generic ChatMessages to anthropic messages.

    Args:
        messages: List of ChatMessages

    Returns:
        Tuple of:
        - List of anthropic messages
        - System prompt
    """
    converted: list = []
    system_prompt = ""
    for chat_message in messages:
        if chat_message.role == MessageRole.SYSTEM:
            # System prompts are passed to the API separately, not as a
            # message; a later SYSTEM message overwrites an earlier one.
            system_prompt = chat_message.content
            continue
        converted.append(
            MessageParam(
                role=chat_message.role.value,
                content=[
                    TextBlockParam(text=chat_message.content, type="text")
                ],  # TODO: type detect for multimodal
            )
        )
    return __merge_common_role_msgs(converted), system_prompt
# Function used in bedrock # Function used in bedrock
......
...@@ -27,7 +27,7 @@ exclude = ["**/BUILD"] ...@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
license = "MIT" license = "MIT"
name = "llama-index-llms-anthropic" name = "llama-index-llms-anthropic"
readme = "README.md" readme = "README.md"
version = "0.1.6" version = "0.1.7"
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = ">=3.8.1,<4.0" python = ">=3.8.1,<4.0"
......
...@@ -27,12 +27,12 @@ exclude = ["**/BUILD"] ...@@ -27,12 +27,12 @@ exclude = ["**/BUILD"]
license = "MIT" license = "MIT"
name = "llama-index-llms-bedrock" name = "llama-index-llms-bedrock"
readme = "README.md" readme = "README.md"
version = "0.1.4" version = "0.1.5"
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = ">=3.8.1,<4.0" python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.1" llama-index-core = "^0.10.1"
llama-index-llms-anthropic = "^0.1.6" llama-index-llms-anthropic = "^0.1.7"
boto3 = "^1.34.26" boto3 = "^1.34.26"
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
......
...@@ -102,10 +102,10 @@ class MockStreamCompletionWithRetry: ...@@ -102,10 +102,10 @@ class MockStreamCompletionWithRetry:
), ),
( (
"anthropic.claude-instant-v1", "anthropic.claude-instant-v1",
'{"messages": [{"role": "user", "content": "test prompt"}], "anthropic_version": "bedrock-2023-05-31", ' '{"messages": [{"role": "user", "content": [{"text": "test prompt", "type": "text"}]}], "anthropic_version": "bedrock-2023-05-31", '
'"temperature": 0.1, "max_tokens": 512}', '"temperature": 0.1, "max_tokens": 512}',
'{"content": [{"text": "\\n\\nThis is indeed a test", "type": "text"}]}', '{"content": [{"text": "\\n\\nThis is indeed a test", "type": "text"}]}',
'{"messages": [{"role": "user", "content": "test prompt"}], "anthropic_version": "bedrock-2023-05-31", ' '{"messages": [{"role": "user", "content": [{"text": "test prompt", "type": "text"}]}], "anthropic_version": "bedrock-2023-05-31", '
'"temperature": 0.1, "max_tokens": 512}', '"temperature": 0.1, "max_tokens": 512}',
), ),
( (
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment