From e55e5e8b76f2400604e0df03a2c290a83f342ff8 Mon Sep 17 00:00:00 2001 From: Siraj R Aizlewood <siraj@aurelio.ai> Date: Tue, 7 May 2024 23:26:52 +0400 Subject: [PATCH] Linting and removal of debugging code. --- docs/examples/function_calling.ipynb | 6 ------ semantic_router/layer.py | 16 ---------------- semantic_router/llms/openai.py | 14 ++++++++++---- semantic_router/route.py | 4 +++- tests/unit/llms/test_llm_openai.py | 2 +- 5 files changed, 14 insertions(+), 28 deletions(-) diff --git a/docs/examples/function_calling.ipynb b/docs/examples/function_calling.ipynb index e8ddd3ab..a0ea9540 100644 --- a/docs/examples/function_calling.ipynb +++ b/docs/examples/function_calling.ipynb @@ -533,12 +533,6 @@ " return function(**route_choice.function_call)\n", "\n", " # If no function is found, use the LLM for general queries\n", - " # DEBUGGING: Start.\n", - " print('#'*50)\n", - " print('query')\n", - " print(query)\n", - " print('#'*50)\n", - " # DEBUGGING: End.\n", " msgs = [Message(role=\"user\", content=query)]\n", " return llm(msgs)\n", "\n", diff --git a/semantic_router/layer.py b/semantic_router/layer.py index 18e78f0f..0b882ff6 100644 --- a/semantic_router/layer.py +++ b/semantic_router/layer.py @@ -235,11 +235,6 @@ class RouteLayer: route_filter: Optional[List[str]] = None, ) -> RouteChoice: # if no vector provided, encode text to get vector - # DEBUGGING: Start. - print('#'*50) - print('CHECKPOINT 1') - print('#'*50) - # DEBUGGING: End. if vector is None: if text is None: raise ValueError("Either text or vector must be provided") @@ -247,11 +242,6 @@ class RouteLayer: route, top_class_scores = self._retrieve_top_route(vector, route_filter) passed = self._check_threshold(top_class_scores, route) - # DEBUGGING: Start. - print('#'*50) - print('CHECKPOINT 2') - print('#'*50) - # DEBUGGING: End. 
if passed and route is not None and not simulate_static: if route.function_schemas and text is None: raise ValueError( @@ -269,12 +259,6 @@ class RouteLayer: route.llm = self.llm else: route.llm = self.llm - # DEBUGGING: Start. - print('#'*50) - print('text') - print(text) - print('#'*50) - # DEBUGGING: End. return route(text) elif passed and route is not None and simulate_static: return RouteChoice( diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py index 9ca742a4..6e801298 100644 --- a/semantic_router/llms/openai.py +++ b/semantic_router/llms/openai.py @@ -15,7 +15,9 @@ from semantic_router.utils.function_call import ( ) import inspect import re -from openai.types.chat.chat_completion_message_tool_call import ChatCompletionMessageToolCall +from openai.types.chat.chat_completion_message_tool_call import ( + ChatCompletionMessageToolCall, +) class OpenAILLM(BaseLLM): @@ -70,14 +72,16 @@ class OpenAILLM(BaseLLM): if self.client is None: raise ValueError("OpenAI client is not initialized.") try: - tools: Union[List[Dict[str, Any]], NotGiven] = function_schemas if function_schemas is not None else NOT_GIVEN + tools: Union[List[Dict[str, Any]], NotGiven] = ( + function_schemas if function_schemas is not None else NOT_GIVEN + ) completion = self.client.chat.completions.create( model=self.name, messages=[m.to_openai() for m in messages], temperature=self.temperature, max_tokens=self.max_tokens, - tools=tools, # type: ignore # We pass a list of dicts which get interpreted as Iterable[ChatCompletionToolParam]. + tools=tools, # type: ignore # We pass a list of dicts which get interpreted as Iterable[ChatCompletionToolParam]. ) if function_schemas: @@ -90,7 +94,9 @@ class OpenAILLM(BaseLLM): ) # Collecting multiple tool calls information - output = str(self._extract_tool_calls_info(tool_calls)) # str in keepign with base type. + output = str( + self._extract_tool_calls_info(tool_calls) + ) # str in keeping with base type. 
else: content = completion.choices[0].message.content if content is None: diff --git a/semantic_router/route.py b/semantic_router/route.py index 38c165d7..a32c778c 100644 --- a/semantic_router/route.py +++ b/semantic_router/route.py @@ -101,7 +101,9 @@ class Route(BaseModel): Generate a dynamic Route object from a list of functions or Pydantic models using LLM """ schemas = function_call.get_schema_list(items=entities) - dynamic_route = cls._generate_dynamic_route(llm=llm, function_schemas=schemas, route_name=route_name) + dynamic_route = cls._generate_dynamic_route( + llm=llm, function_schemas=schemas, route_name=route_name + ) dynamic_route.function_schemas = schemas return dynamic_route diff --git a/tests/unit/llms/test_llm_openai.py b/tests/unit/llms/test_llm_openai.py index 0dc25653..13217c84 100644 --- a/tests/unit/llms/test_llm_openai.py +++ b/tests/unit/llms/test_llm_openai.py @@ -3,6 +3,7 @@ import pytest from semantic_router.llms.openai import OpenAILLM, get_schemas_openai from semantic_router.schema import Message + @pytest.fixture def openai_llm(mocker): mocker.patch("openai.Client") @@ -166,7 +167,6 @@ class TestOpenAILLM: expected_error_message in actual_error_message ), f"Expected error message: '{expected_error_message}', but got: '{actual_error_message}'" - def test_extract_function_inputs(self, openai_llm, mocker): query = "fetch user data" function_schemas = [{"function": "get_user_data", "args": ["user_id"]}] -- GitLab