diff --git a/docs/examples/function_calling.ipynb b/docs/examples/function_calling.ipynb
index e8ddd3ab7e12a3e9435871fe6797c5085001190f..a0ea95400585a7a8fe942d3e0db7946a011869f5 100644
--- a/docs/examples/function_calling.ipynb
+++ b/docs/examples/function_calling.ipynb
@@ -533,12 +533,6 @@
     "                return function(**route_choice.function_call)\n",
     "\n",
     "    # If no function is found, use the LLM for general queries\n",
-    "    # DEBUGGING: Start.\n",
-    "    print('#'*50)\n",
-    "    print('query')\n",
-    "    print(query)\n",
-    "    print('#'*50)\n",
-    "    # DEBUGGING: End.\n",
     "    msgs = [Message(role=\"user\", content=query)]\n",
     "    return llm(msgs)\n",
     "\n",
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 18e78f0f06fb026643c392dc728efce55a89f199..0b882ff67a440db1dc9d33e2eec3f30622d4ade3 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -235,11 +235,6 @@ class RouteLayer:
         route_filter: Optional[List[str]] = None,
     ) -> RouteChoice:
         # if no vector provided, encode text to get vector
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('CHECKPOINT 1')
-        print('#'*50)
-        # DEBUGGING: End.
         if vector is None:
             if text is None:
                 raise ValueError("Either text or vector must be provided")
@@ -247,11 +242,6 @@ class RouteLayer:
 
         route, top_class_scores = self._retrieve_top_route(vector, route_filter)
         passed = self._check_threshold(top_class_scores, route)
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('CHECKPOINT 2')
-        print('#'*50)
-        # DEBUGGING: End.
         if passed and route is not None and not simulate_static:
             if route.function_schemas and text is None:
                 raise ValueError(
@@ -269,12 +259,6 @@ class RouteLayer:
                     route.llm = self.llm
                 else:
                     route.llm = self.llm
-            # DEBUGGING: Start.
-            print('#'*50)
-            print('text')
-            print(text)
-            print('#'*50)
-            # DEBUGGING: End.
             return route(text)
         elif passed and route is not None and simulate_static:
             return RouteChoice(
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index 9ca742a44f4af8b9a192602964d038558eb13583..6e801298b764e86ad286b23eef2b6ae386a003bc 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -15,7 +15,9 @@ from semantic_router.utils.function_call import (
 )
 import inspect
 import re
-from openai.types.chat.chat_completion_message_tool_call import ChatCompletionMessageToolCall
+from openai.types.chat.chat_completion_message_tool_call import (
+    ChatCompletionMessageToolCall,
+)
 
 
 class OpenAILLM(BaseLLM):
@@ -70,14 +72,18 @@ class OpenAILLM(BaseLLM):
         if self.client is None:
             raise ValueError("OpenAI client is not initialized.")
         try:
-            tools: Union[List[Dict[str, Any]], NotGiven] = function_schemas if function_schemas is not None else NOT_GIVEN
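+            # NOT_GIVEN is the OpenAI SDK's sentinel for an omitted argument: when no
+            # schemas are supplied, "tools" is dropped from the request rather than sent as null.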
+            tools: Union[List[Dict[str, Any]], NotGiven] = (
+                function_schemas if function_schemas is not None else NOT_GIVEN
+            )
 
             completion = self.client.chat.completions.create(
                 model=self.name,
                 messages=[m.to_openai() for m in messages],
                 temperature=self.temperature,
                 max_tokens=self.max_tokens,
-                tools=tools, # type: ignore # We pass a list of dicts which get interpreted as Iterable[ChatCompletionToolParam].
+                tools=tools,  # type: ignore # We pass a list of dicts which get interpreted as Iterable[ChatCompletionToolParam].
             )
 
             if function_schemas:
@@ -90,7 +96,9 @@ class OpenAILLM(BaseLLM):
                     )
 
                 # Collecting multiple tool calls information
-                output = str(self._extract_tool_calls_info(tool_calls)) # str in keepign with base type.
+                output = str(
+                    self._extract_tool_calls_info(tool_calls)
+                )  # str in keeping with the base type.
             else:
                 content = completion.choices[0].message.content
                 if content is None:
diff --git a/semantic_router/route.py b/semantic_router/route.py
index 38c165d7dd94f3f831052866dc611fc5d5240f80..a32c778c8506217f542060683bb498989b560c67 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -101,7 +101,10 @@ class Route(BaseModel):
         Generate a dynamic Route object from a list of functions or Pydantic models using LLM
         """
         schemas = function_call.get_schema_list(items=entities)
-        dynamic_route = cls._generate_dynamic_route(llm=llm, function_schemas=schemas, route_name=route_name)
+        dynamic_route = cls._generate_dynamic_route(
+            llm=llm, function_schemas=schemas, route_name=route_name
+        )
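+        # Keep the generated schemas on the route so they are available at query time.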
         dynamic_route.function_schemas = schemas
         return dynamic_route
 
diff --git a/tests/unit/llms/test_llm_openai.py b/tests/unit/llms/test_llm_openai.py
index 0dc25653aad4a7591e39aaf515bb5aeb72013487..13217c8475d35e9107b170140ff64a8c29c1380e 100644
--- a/tests/unit/llms/test_llm_openai.py
+++ b/tests/unit/llms/test_llm_openai.py
@@ -3,6 +3,7 @@ import pytest
 from semantic_router.llms.openai import OpenAILLM, get_schemas_openai
 from semantic_router.schema import Message
 
+
 @pytest.fixture
 def openai_llm(mocker):
     mocker.patch("openai.Client")
@@ -166,7 +167,6 @@ class TestOpenAILLM:
             expected_error_message in actual_error_message
         ), f"Expected error message: '{expected_error_message}', but got: '{actual_error_message}'"
 
-
     def test_extract_function_inputs(self, openai_llm, mocker):
         query = "fetch user data"
         function_schemas = [{"function": "get_user_data", "args": ["user_id"]}]