NOTE(review): reconstructed from a whitespace-mangled copy; hunk bodies match the
declared @@ counts, but blank-line context positions were inferred — re-verify with
`git apply --check` before use. Two semantic concerns in the patch itself:
(1) schema.py now stores `self.type` as a plain str but still compares it against
    `EncoderType` enum members; unless `EncoderType` mixes in `str`, those
    comparisons are always False and the new `else: raise NotImplementedError`
    fires for every type — TODO confirm EncoderType's definition.
(2) route_and_execute in function_call.py drops its `await`s but remains
    `async def`; presumably `llm` became synchronous — verify callers.

diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index bd48f77f1f35eb260543448a9fd0fedfb62aaa13..dae040a5d7537f0f22f0c7edeeb035f58e7eb0fb 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -55,7 +55,7 @@ class LayerConfig:
     def __init__(
         self,
         routes: list[Route] = [],
-        encoder_type: EncoderType = "openai",
+        encoder_type: str = "openai",
         encoder_name: str | None = None,
     ):
         self.encoder_type = encoder_type
@@ -184,18 +184,18 @@ class RouteLayer:
     @classmethod
     def from_json(cls, file_path: str):
         config = LayerConfig.from_file(file_path)
-        encoder = Encoder(type=config.encoder_type, name=config.encoder_name)
+        encoder = Encoder(type=config.encoder_type, name=config.encoder_name).model
         return cls(encoder=encoder, routes=config.routes)
 
     @classmethod
     def from_yaml(cls, file_path: str):
         config = LayerConfig.from_file(file_path)
-        encoder = Encoder(type=config.encoder_type, name=config.encoder_name)
+        encoder = Encoder(type=config.encoder_type, name=config.encoder_name).model
         return cls(encoder=encoder, routes=config.routes)
 
     @classmethod
     def from_config(cls, config: LayerConfig):
-        encoder = Encoder(type=config.encoder_type, name=config.encoder_name)
+        encoder = Encoder(type=config.encoder_type, name=config.encoder_name).model
         return cls(encoder=encoder, routes=config.routes)
 
     def add(self, route: Route):
diff --git a/semantic_router/route.py b/semantic_router/route.py
index 1fa3291a56d3d44fc6ea98f8944c03bd6a084027..06ebf8f39a6ad6b7f5fb71f298ad691621925a50 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -113,7 +113,7 @@ class Route(BaseModel):
         {function_schema}
         """
 
-        output = await llm(prompt)
+        output = llm(prompt)
         if not output:
             raise Exception("No output generated for dynamic route")
 
diff --git a/semantic_router/schema.py b/semantic_router/schema.py
index a3d786dba8653f4a30bc50f4592e793d1b4e23b1..63233322207be40b140eb3413879523652fa25b1 100644
--- a/semantic_router/schema.py
+++ b/semantic_router/schema.py
@@ -23,12 +23,12 @@ class RouteChoice(BaseModel):
 
 @dataclass
 class Encoder:
-    type: EncoderType
-    name: str
+    type: str
+    name: str | None
     model: BaseEncoder
 
-    def __init__(self, type: str, name: str):
-        self.type = EncoderType(type)
+    def __init__(self, type: str, name: str | None):
+        self.type = type
         self.name = name
         if self.type == EncoderType.HUGGINGFACE:
             raise NotImplementedError
@@ -36,6 +36,8 @@ class Encoder:
             self.model = OpenAIEncoder(name)
         elif self.type == EncoderType.COHERE:
             self.model = CohereEncoder(name)
+        else:
+            raise NotImplementedError
 
     def __call__(self, texts: list[str]) -> list[list[float]]:
         return self.model(texts)
diff --git a/semantic_router/utils/function_call.py b/semantic_router/utils/function_call.py
index 9504dfb8aff5049ea875dfa5070837074f199c8c..2ead3ab58dd5c54eaf26fdc5b2f73ea95f4bef9c 100644
--- a/semantic_router/utils/function_call.py
+++ b/semantic_router/utils/function_call.py
@@ -117,11 +117,11 @@ async def route_and_execute(query: str, functions: list[Callable], route_layer):
     function_name = route_layer(query)
     if not function_name:
         logger.warning("No function found, calling LLM...")
-        return await llm(query)
+        return llm(query)
 
     for function in functions:
         if function.__name__ == function_name:
             print(f"Calling function: {function.__name__}")
             schema = get_schema(function)
-            inputs = await extract_function_inputs(query, schema)
+            inputs = extract_function_inputs(query, schema)
             call_function(function, inputs)