Unverified Commit 5a396202 authored by Siraj R Aizlewood

Linting and removal of temporary comparison __call__ in openai.py.

parent 307cc1b1
@@ -68,36 +68,18 @@ class OpenAILLM(BaseLLM):
                 )
                 arguments = tool_calls[0].function.arguments
                 if arguments is None:
-                    raise ValueError("Invalid output, expected arguments to be specified.")
+                    raise ValueError(
+                        "Invalid output, expected arguments to be specified."
+                    )
                 output = str(arguments)  # str to keep MyPy happy.
             else:
                 content = completion.choices[0].message.content
                 if content is None:
                     raise ValueError("Invalid output, expected content.")
                 output = str(content)  # str to keep MyPy happy.
-            return output
-        except Exception as e:
-            logger.error(f"LLM error: {e}")
-            raise Exception(f"LLM error: {e}") from e
-    def __call__(self, messages: List[Message]) -> str:
-        if self.client is None:
-            raise ValueError("OpenAI client is not initialized.")
-        try:
-            completion = self.client.chat.completions.create(
-                model=self.name,
-                messages=[m.to_openai() for m in messages],
-                temperature=self.temperature,
-                max_tokens=self.max_tokens,
-            )
-            output = completion.choices[0].message.content
-            if not output:
-                raise Exception("No output generated")
             return output
         except Exception as e:
             logger.error(f"LLM error: {e}")
             raise Exception(f"LLM error: {e}") from e