From 596006471bae9cc23bb06271d8d1a5fdab48a3d8 Mon Sep 17 00:00:00 2001
From: Siraj R Aizlewood <siraj@aurelio.ai>
Date: Tue, 13 Feb 2024 23:09:17 +0400
Subject: [PATCH] Linting.

---
 semantic_router/layer.py | 25 +++++++++++++++++--------
 semantic_router/route.py | 10 +++++-----
 2 files changed, 22 insertions(+), 13 deletions(-)

diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 35f51405..153d0607 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -15,6 +15,7 @@ from semantic_router.schema import Encoder, EncoderType, RouteChoice
 from semantic_router.utils.logger import logger
 import importlib
 
+
 def is_valid(layer_config: str) -> bool:
     """Make sure the given string is json format and contains the 3 keys: ["encoder_name", "encoder_type", "routes"]"""
     try:
@@ -85,7 +86,9 @@ class LayerConfig:
         elif ext in [".yaml", ".yml"]:
             layer = yaml.safe_load(f)
         else:
-            raise ValueError("Unsupported file type. Only .json and .yaml are supported")
+            raise ValueError(
+                "Unsupported file type. Only .json and .yaml are supported"
+            )
 
         if not is_valid(json.dumps(layer)):
             raise Exception("Invalid config JSON or YAML")
@@ -95,22 +98,28 @@ class LayerConfig:
         routes = []
         for route_data in layer["routes"]:
             # Handle the 'llm' field specially if it exists
-            if 'llm' in route_data:
-                llm_data = route_data.pop('llm')  # Remove 'llm' from route_data and handle it separately
+            if "llm" in route_data:
+                llm_data = route_data.pop(
+                    "llm"
+                )  # Remove 'llm' from route_data and handle it separately
                 # Use the module path directly from llm_data without modification
-                llm_module_path = llm_data['module']
+                llm_module_path = llm_data["module"]
                 # Dynamically import the module and then the class from that module
                 llm_module = importlib.import_module(llm_module_path)
-                llm_class = getattr(llm_module, llm_data['class'])
+                llm_class = getattr(llm_module, llm_data["class"])
                 # Instantiate the LLM class with the provided model name
-                llm = llm_class(name=llm_data['model'])
-                route_data['llm'] = llm  # Reassign the instantiated llm object back to route_data
+                llm = llm_class(name=llm_data["model"])
+                route_data[
+                    "llm"
+                ] = llm  # Reassign the instantiated llm object back to route_data
             # Dynamically create the Route object using the remaining route_data
             route = Route(**route_data)
             routes.append(route)
-        return cls(encoder_type=encoder_type, encoder_name=encoder_name, routes=routes)
+        return cls(
+            encoder_type=encoder_type, encoder_name=encoder_name, routes=routes
+        )
 
     def to_dict(self) -> Dict[str, Any]:
         return {
diff --git a/semantic_router/route.py b/semantic_router/route.py
index 1b08547a..a1e0e9b0 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -70,14 +70,14 @@ class Route(BaseModel):
 
     # def to_dict(self) -> Dict[str, Any]:
     #     return self.dict()
-    
+
     def to_dict(self) -> Dict[str, Any]:
         data = self.dict()
         if self.llm is not None:
-            data['llm'] = {
-                'module': self.llm.__module__,
-                'class': self.llm.__class__.__name__,
-                'model': self.llm.name
+            data["llm"] = {
+                "module": self.llm.__module__,
+                "class": self.llm.__class__.__name__,
+                "model": self.llm.name,
             }
         return data
 
-- 
GitLab