diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index ee621bc888c9876fcac36717e019c72d1978734b..dcce3b45b41141db82a45b6c27e55030e432ea87 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -67,12 +67,10 @@ class OpenAILLM(BaseLLM):
                 )
             arguments = tool_calls[0].function.arguments
             if arguments is None:
-                raise ValueError(
-                    "Invalid output, expected arguments to be specified."
-                )
+                raise ValueError("Invalid output, expected arguments to be specified.")
             output = str(arguments)  # str to keep MyPy happy.
             return output
-
+
         except Exception as e:
             logger.error(f"LLM error: {e}")
             raise Exception(f"LLM error: {e}") from e
@@ -84,8 +82,6 @@ class OpenAILLM(BaseLLM):
         system_prompt = "You are an intelligent AI. Given a command or request from the user, call the function to complete the request."
         messages.append(Message(role="system", content=system_prompt))
         messages.append(Message(role="user", content=query))
-        function_inputs_str = self(
-            messages=messages, function_schema=function_schema
-        )
+        function_inputs_str = self(messages=messages, function_schema=function_schema)
         function_inputs = json.loads(function_inputs_str)
         return function_inputs
diff --git a/semantic_router/route.py b/semantic_router/route.py
index 01901ff903be926d00d421059ca661842a8dc63f..40f8453f8c73313df4071ee88c2991b4e0a58ec2 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -8,7 +8,6 @@ from semantic_router.llms import BaseLLM
 from semantic_router.schema import Message, RouteChoice
 from semantic_router.utils import function_call
 from semantic_router.utils.logger import logger
-from semantic_router.llms import OpenAILLM
 
 try:
     from PIL.Image import Image