Unverified Commit 02c46ca8 authored by Siraj R Aizlewood

Removed debugging code.

parent b7686a40
@@ -44,12 +44,6 @@ class OpenAILLM(BaseLLM):
                 tools = [function_schema]
             else:
                 tools = None
-            # DEBUGGING: Start.
-            print('#'*50)
-            print('tools')
-            print(tools)
-            print('#'*50)
-            # DEBUGGING: End.
             completion = self.client.chat.completions.create(
                 model=self.name,
                 messages=[m.to_openai() for m in messages],
@@ -59,14 +53,6 @@ class OpenAILLM(BaseLLM):
             )
             output = completion.choices[0].message.content
-            # DEBUGGING: Start.
-            print('#'*50)
-            # print('print(completion.choices[0].message.function_call)')
-            # print(print(completion.choices[0].message.function_call))
-            print('completion.choices[0].message.tool_calls')
-            print(completion.choices[0].message.tool_calls)
-            print('#'*50)
-            # DEBUGGING: End.
             if function_schema:
                 return completion.choices[0].message.tool_calls
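
Note on the two hunks above: __call__ issues a standard OpenAI tool-calling request, and the deleted prints were only inspecting the tools list going in and the tool_calls coming back. A minimal standalone sketch of that request pattern (model name, schema, and query are illustrative, not taken from this commit):

    # Sketch of the tool-calling request __call__ performs (openai>=1.0 client).
    from openai import OpenAI

    client = OpenAI()

    # Illustrative function schema; in semantic-router the real one is supplied by the Route.
    function_schema = {
        "type": "function",
        "function": {
            "name": "get_time",
            "description": "Get the current time for a timezone.",
            "parameters": {
                "type": "object",
                "properties": {"timezone": {"type": "string"}},
                "required": ["timezone"],
            },
        },
    }

    completion = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "What time is it in Rome?"}],
        tools=[function_schema],
    )

    # When the model calls a tool, message.content is None and message.tool_calls
    # holds the call(s) -- exactly what the removed debug prints were dumping.
    print(completion.choices[0].message.tool_calls)
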
@@ -93,24 +79,10 @@ class OpenAILLM(BaseLLM):
         output = self(messages=messages, function_schema=function_schema)
         if not output:
             raise Exception("No output generated for extract function input")
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('output')
-        print(output)
-        print('#'*50)
-        # DEBUGGING: End.
         if len(output) != 1:
             raise ValueError("Invalid output, expected a single tool to be called")
         tool_call = output[0]
         arguments_json = tool_call.function.arguments
         function_inputs = json.loads(arguments_json)
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('function_inputs')
-        print(function_inputs)
-        print('#'*50)
-        # DEBUGGING: End.
         return function_inputs
\ No newline at end of file
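
For reference, the arguments field that extract_function_inputs parses (unchanged by this commit) is a JSON string, so the removed prints were showing the raw string and the resulting dict. Illustrative values only:

    import json

    # Shape of a returned tool call (values are made up):
    # tool_call.function.name       -> "get_time"
    # tool_call.function.arguments  -> '{"timezone": "Europe/Rome"}'
    arguments_json = '{"timezone": "Europe/Rome"}'
    function_inputs = json.loads(arguments_json)  # {'timezone': 'Europe/Rome'}
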
@@ -76,12 +76,6 @@ class Route(BaseModel):
             extracted_inputs = self.llm.extract_function_inputs(
                 query=query, function_schema=self.function_schema
             )
-            # DEBUGGING: Start.
-            print('#'*50)
-            print('extracted_inputs')
-            print(extracted_inputs)
-            print('#'*50)
-            # DEBUGGING: End.
             func_call = extracted_inputs
         elif self.openai_function_schema:
             if not isinstance(self.llm, OpenAILLM):
...
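
End to end, the call site in Route now reduces to the lines kept above: ask the LLM to extract the tool inputs and use the returned dict as the function call. A hedged usage sketch (constructor arguments and query are placeholders inferred from the diff, not confirmed by this commit):

    # Assumed flow based on the diff: Route asks its LLM for the tool's keyword arguments.
    llm = OpenAILLM(name="gpt-3.5-turbo")           # hypothetical construction
    extracted_inputs = llm.extract_function_inputs(
        query="What time is it in Rome?",
        function_schema=function_schema,            # as defined in the earlier sketch
    )
    func_call = extracted_inputs                    # e.g. {"timezone": "Europe/Rome"}
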