diff --git a/semantic_router/encoders/mistral.py b/semantic_router/encoders/mistral.py
index c76f1e74a1e86ede34f87ca6857c17dd71487ccd..cf1e290a0f88b90a75c9604e63a71e7475fd5f69 100644
--- a/semantic_router/encoders/mistral.py
+++ b/semantic_router/encoders/mistral.py
@@ -1,4 +1,5 @@
 """This file contains the MistralEncoder class which is used to encode text using MistralAI"""
+
 import os
 from time import sleep
 from typing import List, Optional
diff --git a/semantic_router/encoders/zure.py b/semantic_router/encoders/zure.py
index 8d523e8e331ee751ff8380d7277af7a072ed53f2..ee1b1fa67fdf917b8c9113b1e89fcd7e82dd6486 100644
--- a/semantic_router/encoders/zure.py
+++ b/semantic_router/encoders/zure.py
@@ -66,9 +66,9 @@ class AzureOpenAIEncoder(BaseEncoder):
 
         try:
             self.client = openai.AzureOpenAI(
-                azure_deployment=str(self.deployment_name)
-                if self.deployment_name
-                else None,
+                azure_deployment=(
+                    str(self.deployment_name) if self.deployment_name else None
+                ),
                 api_key=str(self.api_key),
                 azure_endpoint=str(self.azure_endpoint),
                 api_version=str(self.api_version),
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index dffc1d4b4af2671019ed0cfe47f7f67698612799..bf49a4258b143af81d4857759bfbc910e3749093 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -111,9 +111,9 @@ class LayerConfig:
                     llm_class = getattr(llm_module, llm_data["class"])
                     # Instantiate the LLM class with the provided model name
                     llm = llm_class(name=llm_data["model"])
-                    route_data[
-                        "llm"
-                    ] = llm  # Reassign the instantiated llm object back to route_data
+                    route_data["llm"] = (
+                        llm  # Reassign the instantiated llm object back to route_data
+                    )
 
                 # Dynamically create the Route object using the remaining route_data
                 route = Route(**route_data)
diff --git a/semantic_router/splitters/consecutive_sim.py b/semantic_router/splitters/consecutive_sim.py
index 775d5d2c9b36f6a5c48e94eae6eb37dcf245c7c6..897baae40d5ef1055bf7fc1c7253756b7944b6d6 100644
--- a/semantic_router/splitters/consecutive_sim.py
+++ b/semantic_router/splitters/consecutive_sim.py
@@ -8,7 +8,6 @@ from semantic_router.splitters.base import BaseSplitter
 
 
 class ConsecutiveSimSplitter(BaseSplitter):
-
     """
     Called "consecutive sim splitter" because we check the similarities of consecutive document embeddings (compare ith to i+1th document embedding).
     """
diff --git a/tests/unit/test_hybrid_layer.py b/tests/unit/test_hybrid_layer.py
index 29643e958dd029f93e8b99c8ae99635598811ce9..d489650903241a81039bfbbcc58d8bdd86ad5842 100644
--- a/tests/unit/test_hybrid_layer.py
+++ b/tests/unit/test_hybrid_layer.py
@@ -81,7 +81,11 @@ sparse_encoder.fit(["The quick brown fox", "jumps over the lazy dog", "Hello, wo
 class TestHybridRouteLayer:
     def test_initialization(self, openai_encoder, routes):
         route_layer = HybridRouteLayer(
-            encoder=openai_encoder, sparse_encoder=sparse_encoder, routes=routes, top_k=10, alpha=0.8,
+            encoder=openai_encoder,
+            sparse_encoder=sparse_encoder,
+            routes=routes,
+            top_k=10,
+            alpha=0.8,
         )
         assert route_layer.index is not None and route_layer.categories is not None
         assert openai_encoder.score_threshold == 0.82
diff --git a/tests/unit/test_splitters.py b/tests/unit/test_splitters.py
index 5ee28504dad622cd8cb52c62553f651a5739e990..19434325a768dd816d01b925efb2548824f8cbc7 100644
--- a/tests/unit/test_splitters.py
+++ b/tests/unit/test_splitters.py
@@ -47,10 +47,8 @@ def test_cumulative_sim_splitter():
     # Adjust the side_effect to simulate the encoder's behavior for cumulative document comparisons
     # This simplistic simulation assumes binary embeddings for demonstration purposes
     # Define a side_effect function for the mock encoder
-    mock_encoder.side_effect = (
-        lambda x: [[0.5, 0]]
-        if "doc1" in x or "doc1\ndoc2" in x or "doc2" in x
-        else [[0, 0.5]]
+    mock_encoder.side_effect = lambda x: (
+        [[0.5, 0]] if "doc1" in x or "doc1\ndoc2" in x or "doc2" in x else [[0, 0.5]]
     )
 
     # Instantiate the CumulativeSimSplitter with the mock encoder
@@ -112,8 +110,8 @@ def test_split_by_topic_consecutive_similarity():
 
 def test_split_by_topic_cumulative_similarity():
     mock_encoder = Mock()
-    mock_encoder.side_effect = (
-        lambda x: [[0.5, 0]] if "User: What is the latest news?" in x else [[0, 0.5]]
+    mock_encoder.side_effect = lambda x: (
+        [[0.5, 0]] if "User: What is the latest news?" in x else [[0, 0.5]]
     )
     messages = [