diff --git a/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py b/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py
index 536363ff4caca2872de6b57fc467ad780d0de94b..1943d1a3f3720fb4f3a19bc5f58b1f815a6ce495 100644
--- a/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py
@@ -324,7 +324,7 @@ class MistralAI(FunctionCallingLLM):
         messages = to_mistral_chatmessage(messages)
         all_kwargs = self._get_all_kwargs(**kwargs)
 
-        response = await self._aclient.chat_stream(messages=messages, **all_kwargs)
+        response = self._aclient.chat_stream(messages=messages, **all_kwargs)
 
         async def gen() -> ChatResponseAsyncGen:
             content = ""
diff --git a/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml
index 70acb9160a3bc157eed75ec39fcf4bd1177f73ad..517d836ceadbed31fd255dc8018c2f636b6527dc 100644
--- a/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml
+++ b/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml
@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-llms-mistralai"
 readme = "README.md"
-version = "0.1.11"
+version = "0.1.12"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
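
Note on the base.py change above: in the mistralai Python client targeted here, MistralAsyncClient.chat_stream is an async generator function, so calling it returns an async generator to be consumed with `async for`, not a coroutine to await; awaiting it raises a TypeError at runtime, which is what removing the `await` avoids. The sketch below illustrates that distinction only; the client construction, model name, and chunk attributes are assumptions about the 0.x mistralai client and are not part of this diff.

import asyncio

from mistralai.async_client import MistralAsyncClient
from mistralai.models.chat_completion import ChatMessage


async def main() -> None:
    # Hypothetical setup: api_key and model are placeholders, not from the diff.
    client = MistralAsyncClient(api_key="YOUR_API_KEY")
    messages = [ChatMessage(role="user", content="Say hello")]

    # chat_stream() returns an async generator. Awaiting it (as the removed line
    # did) would fail with "TypeError: object async_generator can't be used in
    # 'await' expression"; instead the generator is iterated with `async for`.
    response = client.chat_stream(model="mistral-tiny", messages=messages)

    async for chunk in response:
        delta = chunk.choices[0].delta.content
        if delta:
            print(delta, end="", flush=True)


asyncio.run(main())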