Unverified Commit b38a0b75 authored by Siraj R Aizlewood

Linting

parent d4d29a45
-import os
 from typing import List, Optional
 import requests
-import json
 from semantic_router.llms import BaseLLM
 from semantic_router.schema import Message
@@ -29,14 +27,13 @@ class OllamaLLM(BaseLLM):
         self.stream = stream

     def __call__(
         self,
         messages: List[Message],
         temperature: Optional[float] = None,
         llm_name: Optional[str] = None,
         max_tokens: Optional[int] = None,
-        stream: Optional[bool] = None
+        stream: Optional[bool] = None,
     ) -> str:
         # Use instance defaults if not overridden
         temperature = temperature if temperature is not None else self.temperature
         llm_name = llm_name if llm_name is not None else self.llm_name
@@ -47,19 +44,16 @@ class OllamaLLM(BaseLLM):
             payload = {
                 "model": llm_name,
                 "messages": [m.to_openai() for m in messages],
-                "options": {
-                    "temperature": temperature,
-                    "num_predict": max_tokens
-                },
+                "options": {"temperature": temperature, "num_predict": max_tokens},
                 "format": "json",
-                "stream": stream
+                "stream": stream,
             }
             response = requests.post("http://localhost:11434/api/chat", json=payload)
             output = response.json()["message"]["content"]
             return output
         except Exception as e:
             logger.error(f"LLM error: {e}")
             raise Exception(f"LLM error: {e}") from e
\ No newline at end of file
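
For reference, the payload assembled in __call__ above is what a local Ollama server's /api/chat endpoint expects. Below is a minimal sketch of the equivalent raw request; the model name and message content are illustrative placeholders, not values from this commit:

import requests

# Mirrors the payload built in __call__ above; "llama2" and the prompt
# are placeholder assumptions only.
payload = {
    "model": "llama2",
    "messages": [{"role": "user", "content": "Say hello in JSON."}],
    "options": {"temperature": 0.2, "num_predict": 200},
    "format": "json",  # ask Ollama to return JSON-formatted output
    "stream": False,   # request a single, non-streamed response
}
response = requests.post("http://localhost:11434/api/chat", json=payload)
# Non-streaming chat responses carry the text under message.content
print(response.json()["message"]["content"])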