From 02c46ca85ecc6756f3243d2d98189d940aa78cf4 Mon Sep 17 00:00:00 2001
From: Siraj R Aizlewood <siraj@aurelio.ai>
Date: Sun, 28 Apr 2024 22:50:26 +0400
Subject: [PATCH] Remove debugging code.

---
 semantic_router/llms/openai.py | 28 ----------------------------
 semantic_router/route.py       |  6 ------
 2 files changed, 34 deletions(-)

diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index 4e2af247..90728210 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -44,12 +44,6 @@ class OpenAILLM(BaseLLM):
                 tools = [function_schema] 
             else:
                 tools = None
-            # DEBUGGING: Start.
-            print('#'*50)
-            print('tools')
-            print(tools)
-            print('#'*50)
-            # DEBUGGING: End.
             completion = self.client.chat.completions.create(
                 model=self.name,
                 messages=[m.to_openai() for m in messages],
@@ -59,14 +53,6 @@ class OpenAILLM(BaseLLM):
             )
 
             output = completion.choices[0].message.content
-            # DEBUGGING: Start.
-            print('#'*50)
-            # print('print(completion.choices[0].message.function_call)')
-            # print(print(completion.choices[0].message.function_call))
-            print('completion.choices[0].message.tool_calls')
-            print(completion.choices[0].message.tool_calls)
-            print('#'*50)
-            # DEBUGGING: End.
 
             if function_schema:
                 return completion.choices[0].message.tool_calls
@@ -93,24 +79,10 @@ class OpenAILLM(BaseLLM):
         output = self(messages=messages, function_schema=function_schema)
         if not output:
             raise Exception("No output generated for extract function input")
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('output')
-        print(output)
-        print('#'*50)
-        # DEBUGGING: End.
         if len(output) != 1:
             raise ValueError("Invalid output, expected a single tool to be called")
         tool_call = output[0]
         arguments_json = tool_call.function.arguments
         function_inputs = json.loads(arguments_json)
-
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('function_inputs')
-        print(function_inputs)
-        print('#'*50)
-        # DEBUGGING: End.
-
         return function_inputs
                 
\ No newline at end of file
diff --git a/semantic_router/route.py b/semantic_router/route.py
index b0580885..3a62fab1 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -76,12 +76,6 @@ class Route(BaseModel):
             extracted_inputs = self.llm.extract_function_inputs(
                 query=query, function_schema=self.function_schema
             )
-            # DEBUGGING: Start.
-            print('#'*50)
-            print('extracted_inputs')
-            print(extracted_inputs)
-            print('#'*50)
-            # DEBUGGING: End.
             func_call = extracted_inputs
         elif self.openai_function_schema:
             if not isinstance(self.llm, OpenAILLM):
-- 
GitLab