Skip to content
Snippets Groups Projects
Unverified Commit 465c24e8 authored by Logan's avatar Logan Committed by GitHub
Browse files

chore: remove duplicate achat (#13564)

parent 024c418f
Branches
Tags
No related merge requests found
......@@ -147,48 +147,6 @@ class Ollama(CustomLLM):
additional_kwargs=get_additional_kwargs(raw, ("message",)),
)
@llm_chat_callback()
async def achat(
    self, messages: Sequence[ChatMessage], **kwargs: Any
) -> ChatResponse:
    """Send a non-streaming chat request to the Ollama ``/api/chat`` endpoint.

    Args:
        messages: Conversation history to forward to the model.
        **kwargs: Extra fields merged into the request payload
            (may override the defaults built here).

    Returns:
        ChatResponse: The model's reply plus the raw server response.

    Raises:
        httpx.HTTPStatusError: If the server responds with an error status.
    """
    # Translate framework messages into the Ollama wire format.
    wire_messages = []
    for msg in messages:
        wire_messages.append(
            {
                "role": msg.role.value,
                "content": msg.content,
                **msg.additional_kwargs,
            }
        )

    request_body = {
        "model": self.model,
        "messages": wire_messages,
        "options": self._model_kwargs,
        "stream": False,
        **kwargs,
    }
    # Ask the server for structured JSON output when configured.
    if self.json_mode:
        request_body["format"] = "json"

    async with httpx.AsyncClient(timeout=Timeout(self.request_timeout)) as client:
        response = await client.post(
            url=f"{self.base_url}/api/chat",
            json=request_body,
        )
        response.raise_for_status()
        raw = response.json()
        message = raw["message"]
        # Everything besides content/role (resp. message) is preserved
        # as additional kwargs for downstream consumers.
        return ChatResponse(
            message=ChatMessage(
                content=message.get("content"),
                role=MessageRole(message.get("role")),
                additional_kwargs=get_additional_kwargs(
                    message, ("content", "role")
                ),
            ),
            raw=raw,
            additional_kwargs=get_additional_kwargs(raw, ("message",)),
        )
@llm_chat_callback()
def stream_chat(
self, messages: Sequence[ChatMessage], **kwargs: Any
......
......@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-ollama"
readme = "README.md"
version = "0.1.3"
version = "0.1.4"
[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment