From 16c05f2282e8744015daa40a7365c61049061fb6 Mon Sep 17 00:00:00 2001
From: Siraj R Aizlewood <siraj@aurelio.ai>
Date: Wed, 21 Feb 2024 17:41:10 +0400
Subject: [PATCH] Linting.

---
 tests/unit/llms/test_llm_ollama.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/tests/unit/llms/test_llm_ollama.py b/tests/unit/llms/test_llm_ollama.py
index cf4c328e..29998982 100644
--- a/tests/unit/llms/test_llm_ollama.py
+++ b/tests/unit/llms/test_llm_ollama.py
@@ -2,10 +2,12 @@ import pytest
 from semantic_router.llms.ollama import OllamaLLM
 from semantic_router.schema import Message
 
+
 @pytest.fixture
 def ollama_llm():
     return OllamaLLM()
 
+
 class TestOllamaLLM:
     def test_ollama_llm_init_success(self, ollama_llm):
         assert ollama_llm.name == "ollama"
@@ -17,13 +19,13 @@ class TestOllamaLLM:
     def test_ollama_llm_call_success(self, ollama_llm, mocker):
         mock_response = mocker.MagicMock()
         mock_response.json.return_value = {"message": {"content": "test response"}}
-        mocker.patch('requests.post', return_value=mock_response)
-
+        mocker.patch("requests.post", return_value=mock_response)
+
         output = ollama_llm([Message(role="user", content="test")])
         assert output == "test response"
 
     def test_ollama_llm_error_handling(self, ollama_llm, mocker):
-        mocker.patch('requests.post', side_effect=Exception("LLM error"))
+        mocker.patch("requests.post", side_effect=Exception("LLM error"))
         with pytest.raises(Exception) as exc_info:
             ollama_llm([Message(role="user", content="test")])
-        assert "LLM error" in str(exc_info.value)
\ No newline at end of file
+        assert "LLM error" in str(exc_info.value)
-- 
GitLab