From 465c24e8ec28afb6fe0e6fbcd6af79ee264f2b15 Mon Sep 17 00:00:00 2001
From: Logan <logan.markewich@live.com>
Date: Fri, 17 May 2024 14:26:10 -0600
Subject: [PATCH] chore: remove duplicate achat (#13564)

---
 .../llama_index/llms/ollama/base.py          | 42 -------------------
 .../llama-index-llms-ollama/pyproject.toml   |  2 +-
 2 files changed, 1 insertion(+), 43 deletions(-)

diff --git a/llama-index-integrations/llms/llama-index-llms-ollama/llama_index/llms/ollama/base.py b/llama-index-integrations/llms/llama-index-llms-ollama/llama_index/llms/ollama/base.py
index f0848a89d..80079faee 100644
--- a/llama-index-integrations/llms/llama-index-llms-ollama/llama_index/llms/ollama/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-ollama/llama_index/llms/ollama/base.py
@@ -147,48 +147,6 @@ class Ollama(CustomLLM):
                 additional_kwargs=get_additional_kwargs(raw, ("message",)),
             )
 
-    @llm_chat_callback()
-    async def achat(
-        self, messages: Sequence[ChatMessage], **kwargs: Any
-    ) -> ChatResponse:
-        payload = {
-            "model": self.model,
-            "messages": [
-                {
-                    "role": message.role.value,
-                    "content": message.content,
-                    **message.additional_kwargs,
-                }
-                for message in messages
-            ],
-            "options": self._model_kwargs,
-            "stream": False,
-            **kwargs,
-        }
-
-        if self.json_mode:
-            payload["format"] = "json"
-
-        async with httpx.AsyncClient(timeout=Timeout(self.request_timeout)) as client:
-            response = await client.post(
-                url=f"{self.base_url}/api/chat",
-                json=payload,
-            )
-            response.raise_for_status()
-            raw = response.json()
-            message = raw["message"]
-            return ChatResponse(
-                message=ChatMessage(
-                    content=message.get("content"),
-                    role=MessageRole(message.get("role")),
-                    additional_kwargs=get_additional_kwargs(
-                        message, ("content", "role")
-                    ),
-                ),
-                raw=raw,
-                additional_kwargs=get_additional_kwargs(raw, ("message",)),
-            )
-
     @llm_chat_callback()
     def stream_chat(
         self, messages: Sequence[ChatMessage], **kwargs: Any
diff --git a/llama-index-integrations/llms/llama-index-llms-ollama/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-ollama/pyproject.toml
index 6f086a988..da6ac04d6 100644
--- a/llama-index-integrations/llms/llama-index-llms-ollama/pyproject.toml
+++ b/llama-index-integrations/llms/llama-index-llms-ollama/pyproject.toml
@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-llms-ollama"
 readme = "README.md"
-version = "0.1.3"
+version = "0.1.4"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
--
GitLab
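
The first hunk drops a duplicate `achat` definition from the `Ollama` class; one `achat` implementation remains, so async callers are unaffected. The removed copy shows what that path does: it POSTs the chat messages to f"{self.base_url}/api/chat" with "stream": False and wraps the JSON reply in a ChatResponse. A minimal usage sketch of the surviving async chat path follows; it assumes llama-index-llms-ollama >= 0.1.4, the llama_index.core.llms / llama_index.llms.ollama import paths, and a local Ollama server at the default http://localhost:11434 with the named model already pulled (the model name and timeout below are illustrative, not taken from the patch).

    # Usage sketch of the async chat path kept by this change.
    # "llama3" and the 60-second timeout are illustrative values; `model` and
    # `request_timeout` are the same constructor fields the removed method read
    # (self.model, self.request_timeout).
    import asyncio

    from llama_index.core.llms import ChatMessage
    from llama_index.llms.ollama import Ollama


    async def main() -> None:
        llm = Ollama(model="llama3", request_timeout=60.0)
        response = await llm.achat(
            [ChatMessage(role="user", content="Reply with one word: hello")]
        )
        # ChatResponse.message carries the assistant message parsed from the
        # /api/chat JSON body, as in the removed code.
        print(response.message.content)


    asyncio.run(main())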