Skip to content
Snippets Groups Projects
Unverified Commit 78af3400 authored by Souyama's avatar Souyama Committed by GitHub
Browse files

Add Mistral provider to Bedrock (#11994)

parent a62b158a
No related branches found
No related tags found
No related merge requests found
......@@ -48,6 +48,8 @@ CHAT_ONLY_MODELS = {
"anthropic.claude-3-haiku-20240307-v1:0": 200000,
"meta.llama2-13b-chat-v1": 2048,
"meta.llama2-70b-chat-v1": 4096,
"mistral.mistral-7b-instruct-v0:2": 32000,
"mistral.mixtral-8x7b-instruct-v0:1": 32000,
}
BEDROCK_FOUNDATION_LLMS = {**COMPLETION_MODELS, **CHAT_ONLY_MODELS}
......@@ -64,6 +66,8 @@ STREAMING_MODELS = {
"anthropic.claude-3-sonnet-20240229-v1:0",
"anthropic.claude-3-haiku-20240307-v1:0",
"meta.llama2-13b-chat-v1",
"mistral.mistral-7b-instruct-v0:2",
"mistral.mixtral-8x7b-instruct-v0:1",
}
......@@ -178,12 +182,24 @@ class MetaProvider(Provider):
return response["generation"]
class MistralProvider(Provider):
    """Bedrock provider adapter for Mistral-hosted models.

    Supplies the request key for the token limit, the prompt-formatting
    callables, and the response-text extractor used by the Bedrock LLM
    wrapper for ``mistral.*`` model ids.
    """

    # Request-body key Bedrock Mistral models use for the generation limit
    # (e.g. the Meta provider uses "max_gen_len" instead).
    max_tokens_key = "max_tokens"

    def __init__(self) -> None:
        # NOTE(review): reuses the Llama-style prompt builders, which wrap
        # the system message in a ``<<SYS>> ... <</SYS>>`` block inside
        # ``[INST] ... [/INST]``. Mistral's instruct template is close to
        # Llama's but does not define ``<<SYS>>`` — confirm the Bedrock
        # Mistral models tolerate this formatting (the test fixture pins
        # this exact prompt shape).
        self.messages_to_prompt = messages_to_llama_prompt
        self.completion_to_prompt = completion_to_llama_prompt

    def get_text_from_response(self, response: dict) -> str:
        """Extract the generated text from a Bedrock Mistral response body.

        The Mistral invoke response is shaped like
        ``{"outputs": [{"text": ..., "stop_reason": ...}]}``; only the
        first output's text is returned.
        """
        return response["outputs"][0]["text"]
# Registry mapping a Bedrock model-id prefix (the segment before the first
# ".", e.g. "mistral" in "mistral.mistral-7b-instruct-v0:2") to the Provider
# instance that formats its requests and parses its responses.
PROVIDERS = {
    "amazon": AmazonProvider(),
    "ai21": Ai21Provider(),
    "anthropic": AnthropicProvider(),
    "cohere": CohereProvider(),
    "meta": MetaProvider(),
    "mistral": MistralProvider(),
}
......
......@@ -122,6 +122,20 @@ class MockStreamCompletionWithRetry:
"not reference any given instructions or context. \\n<</SYS>>\\n\\n "
'test prompt [/INST]", "temperature": 0.1, "max_gen_len": 512}',
),
(
"mistral.mistral-7b-instruct-v0:2",
'{"prompt": "<s> [INST] <<SYS>>\\n You are a helpful, respectful and '
"honest assistant. Always answer as helpfully as possible and follow "
"ALL given instructions. Do not speculate or make up information. Do "
"not reference any given instructions or context. \\n<</SYS>>\\n\\n "
'test prompt [/INST]", "temperature": 0.1, "max_tokens": 512}',
'{"outputs": [{"text": "\\n\\nThis is indeed a test", "stop_reason": "length"}]}',
'{"prompt": "<s> [INST] <<SYS>>\\n You are a helpful, respectful and '
"honest assistant. Always answer as helpfully as possible and follow "
"ALL given instructions. Do not speculate or make up information. Do "
"not reference any given instructions or context. \\n<</SYS>>\\n\\n "
'test prompt [/INST]", "temperature": 0.1, "max_tokens": 512}',
),
],
)
def test_model_basic(
......
......@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-mistralai"
readme = "README.md"
version = "0.1.6"
version = "0.1.7"
[tool.poetry.dependencies]
python = ">=3.9,<4.0"
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment