Unverified Commit 16c05f22 authored by Siraj R Aizlewood

Linting.

parent 11f39e25
@@ -2,10 +2,12 @@ import pytest
 from semantic_router.llms.ollama import OllamaLLM
 from semantic_router.schema import Message
 
+
 @pytest.fixture
 def ollama_llm():
     return OllamaLLM()
 
+
 class TestOllamaLLM:
     def test_ollama_llm_init_success(self, ollama_llm):
         assert ollama_llm.name == "ollama"
@@ -17,13 +19,13 @@ class TestOllamaLLM:
 
     def test_ollama_llm_call_success(self, ollama_llm, mocker):
         mock_response = mocker.MagicMock()
         mock_response.json.return_value = {"message": {"content": "test response"}}
-        mocker.patch('requests.post', return_value=mock_response)
+        mocker.patch("requests.post", return_value=mock_response)
         output = ollama_llm([Message(role="user", content="test")])
         assert output == "test response"
 
     def test_ollama_llm_error_handling(self, ollama_llm, mocker):
-        mocker.patch('requests.post', side_effect=Exception("LLM error"))
+        mocker.patch("requests.post", side_effect=Exception("LLM error"))
         with pytest.raises(Exception) as exc_info:
             ollama_llm([Message(role="user", content="test")])
         assert "LLM error" in str(exc_info.value)
\ No newline at end of file
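
The tests above patch requests.post, so they run without a live Ollama server: the call test substitutes a canned JSON body, and the error test forces the HTTP call to raise. The mocker fixture they use comes from the pytest-mock plugin. For orientation, below is a minimal sketch of the kind of class these tests would exercise; it assumes (the diff itself does not show this) that OllamaLLM posts the chat messages to Ollama's local /api/chat endpoint and returns json()["message"]["content"]. The default model name, base URL, and error wrapping are illustrative stand-ins, not the repository's actual implementation.

import requests


class Message:
    """Simplified stand-in for semantic_router.schema.Message."""

    def __init__(self, role: str, content: str):
        self.role = role
        self.content = content

    def to_dict(self) -> dict:
        return {"role": self.role, "content": self.content}


class OllamaLLM:
    """Sketch of an Ollama-backed wrapper matching the behaviour the tests check."""

    name = "ollama"

    def __init__(self, llm_name: str = "openhermes", base_url: str = "http://localhost:11434"):
        # llm_name and base_url are assumed defaults for illustration only.
        self.llm_name = llm_name
        self.base_url = base_url

    def __call__(self, messages: list[Message]) -> str:
        try:
            payload = {
                "model": self.llm_name,
                "messages": [m.to_dict() for m in messages],
                "stream": False,
            }
            # In the tests, requests.post is replaced by a MagicMock, so no HTTP traffic occurs.
            response = requests.post(f"{self.base_url}/api/chat", json=payload)
            # Matches the mocked body: {"message": {"content": "test response"}}.
            return response.json()["message"]["content"]
        except Exception as e:
            # Re-raising preserves the original message, so the error test's
            # assertion on "LLM error" still holds.
            raise Exception(f"Ollama error: {e}") from e

Because the tests patch the string "requests.post", the mock replaces the post attribute on the requests module; an implementation that calls requests.post(...) directly, as above, picks it up regardless of endpoint or payload details.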