From 5a3962024eab53b10934063a2c86df6d379e6a89 Mon Sep 17 00:00:00 2001
From: Siraj R Aizlewood <siraj@aurelio.ai>
Date: Tue, 30 Apr 2024 02:20:52 +0400
Subject: [PATCH] Linting and removal of temporary comparison __call__ in
 openai.py.

---
 semantic_router/llms/openai.py | 28 +++++-----------------------
 1 file changed, 5 insertions(+), 23 deletions(-)

diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index 920d954f..0f561cc4 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -68,36 +68,18 @@ class OpenAILLM(BaseLLM):
                     )
                 arguments = tool_calls[0].function.arguments
                 if arguments is None:
-                    raise ValueError("Invalid output, expected arguments to be specified.")
+                    raise ValueError(
+                        "Invalid output, expected arguments to be specified."
+                    )
                 output = str(arguments)  # str to keep MyPy happy.
             else:
                 content = completion.choices[0].message.content
                 if content is None:
                     raise ValueError("Invalid output, expected content.")
-                output = str(content)  # str to keep MyPy happy.
-
-            return output
-
-        except Exception as e:
-            logger.error(f"LLM error: {e}")
-            raise Exception(f"LLM error: {e}") from e
-
-    def __call__(self, messages: List[Message]) -> str:
-        if self.client is None:
-            raise ValueError("OpenAI client is not initialized.")
-        try:
-            completion = self.client.chat.completions.create(
-                model=self.name,
-                messages=[m.to_openai() for m in messages],
-                temperature=self.temperature,
-                max_tokens=self.max_tokens,
-            )
+                output = str(content)  # str to keep MyPy happy.
 
-            output = completion.choices[0].message.content
-
-            if not output:
-                raise Exception("No output generated")
             return output
+
         except Exception as e:
             logger.error(f"LLM error: {e}")
             raise Exception(f"LLM error: {e}") from e
-- 
GitLab
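
A minimal usage sketch of the retained __call__ after the temporary duplicate
is removed. This is not part of the patch: it assumes OpenAILLM accepts `name`
and `openai_api_key` keyword arguments and that Message is importable from
semantic_router.schema; adjust if the actual constructor differs.

    import os

    from semantic_router.llms.openai import OpenAILLM
    from semantic_router.schema import Message

    # Hypothetical constructor arguments, shown for illustration only.
    llm = OpenAILLM(
        name="gpt-3.5-turbo",
        openai_api_key=os.environ["OPENAI_API_KEY"],
    )

    messages = [
        Message(role="system", content="You are a helpful assistant."),
        Message(role="user", content="Say hello."),
    ]

    try:
        # With no function schema, the surviving __call__ returns
        # str(completion.choices[0].message.content).
        print(llm(messages))
    except Exception as e:
        # Per the patched code, failures are logged and re-raised
        # as Exception(f"LLM error: {e}").
        print(f"Call failed: {e}")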