Skip to content
Snippets Groups Projects
Unverified Commit a5cfa036 authored by James Briggs's avatar James Briggs Committed by GitHub
Browse files

Merge pull request #105 from alongadot/issue-94/reraise-exceptions-w-trace

fix: issue #94 raise from exceptions
parents 779072be d8319a6e
Branches
Tags
No related merge requests found
......@@ -25,7 +25,9 @@ class CohereEncoder(BaseEncoder):
try:
self.client = cohere.Client(cohere_api_key)
except Exception as e:
raise ValueError(f"Cohere API client failed to initialize. Error: {e}")
raise ValueError(
f"Cohere API client failed to initialize. Error: {e}"
) from e
def __call__(self, docs: List[str]) -> List[List[float]]:
if self.client is None:
......@@ -34,4 +36,4 @@ class CohereEncoder(BaseEncoder):
embeds = self.client.embed(docs, input_type="search_query", model=self.name)
return embeds.embeddings
except Exception as e:
raise ValueError(f"Cohere API call failed. Error: {e}")
raise ValueError(f"Cohere API call failed. Error: {e}") from e
......@@ -48,4 +48,4 @@ class FastEmbedEncoder(BaseEncoder):
embeddings: List[List[float]] = [e.tolist() for e in embeds]
return embeddings
except Exception as e:
raise ValueError(f"FastEmbed embed failed. Error: {e}")
raise ValueError(f"FastEmbed embed failed. Error: {e}") from e
......@@ -29,7 +29,9 @@ class OpenAIEncoder(BaseEncoder):
try:
self.client = openai.Client(api_key=api_key)
except Exception as e:
raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
raise ValueError(
f"OpenAI API client failed to initialize. Error: {e}"
) from e
def __call__(self, docs: List[str]) -> List[List[float]]:
if self.client is None:
......@@ -49,7 +51,7 @@ class OpenAIEncoder(BaseEncoder):
logger.warning(f"Retrying in {2**j} seconds...")
except Exception as e:
logger.error(f"OpenAI API call failed. Error: {error_message}")
raise ValueError(f"OpenAI API call failed. Error: {e}")
raise ValueError(f"OpenAI API call failed. Error: {e}") from e
if (
not embeds
......
......@@ -74,7 +74,9 @@ class AzureOpenAIEncoder(BaseEncoder):
# _strict_response_validation=True,
)
except Exception as e:
raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
raise ValueError(
f"OpenAI API client failed to initialize. Error: {e}"
) from e
def __call__(self, docs: List[str]) -> List[List[float]]:
if self.client is None:
......@@ -100,7 +102,7 @@ class AzureOpenAIEncoder(BaseEncoder):
logger.warning(f"Retrying in {2**j} seconds...")
except Exception as e:
logger.error(f"Azure OpenAI API call failed. Error: {error_message}")
raise ValueError(f"Azure OpenAI API call failed. Error: {e}")
raise ValueError(f"Azure OpenAI API call failed. Error: {e}") from e
if (
not embeds
......
......@@ -24,7 +24,9 @@ class CohereLLM(BaseLLM):
try:
self.client = cohere.Client(cohere_api_key)
except Exception as e:
raise ValueError(f"Cohere API client failed to initialize. Error: {e}")
raise ValueError(
f"Cohere API client failed to initialize. Error: {e}"
) from e
def __call__(self, messages: List[Message]) -> str:
if self.client is None:
......@@ -43,4 +45,4 @@ class CohereLLM(BaseLLM):
return output
except Exception as e:
raise ValueError(f"Cohere API call failed. Error: {e}")
raise ValueError(f"Cohere API call failed. Error: {e}") from e
......@@ -29,7 +29,9 @@ class OpenAILLM(BaseLLM):
try:
self.client = openai.OpenAI(api_key=api_key)
except Exception as e:
raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
raise ValueError(
f"OpenAI API client failed to initialize. Error: {e}"
) from e
self.temperature = temperature
self.max_tokens = max_tokens
......@@ -51,4 +53,4 @@ class OpenAILLM(BaseLLM):
return output
except Exception as e:
logger.error(f"LLM error: {e}")
raise Exception(f"LLM error: {e}")
raise Exception(f"LLM error: {e}") from e
......@@ -34,7 +34,9 @@ class OpenRouterLLM(BaseLLM):
try:
self.client = openai.OpenAI(api_key=api_key, base_url=self.base_url)
except Exception as e:
raise ValueError(f"OpenRouter API client failed to initialize. Error: {e}")
raise ValueError(
f"OpenRouter API client failed to initialize. Error: {e}"
) from e
self.temperature = temperature
self.max_tokens = max_tokens
......@@ -56,4 +58,4 @@ class OpenRouterLLM(BaseLLM):
return output
except Exception as e:
logger.error(f"LLM error: {e}")
raise Exception(f"LLM error: {e}")
raise Exception(f"LLM error: {e}") from e
......@@ -32,7 +32,7 @@ def llm(prompt: str) -> Optional[str]:
return output
except Exception as e:
logger.error(f"LLM error: {e}")
raise Exception(f"LLM error: {e}")
raise Exception(f"LLM error: {e}") from e
# TODO integrate async LLM function
......@@ -62,4 +62,4 @@ def llm(prompt: str) -> Optional[str]:
# return output
# except Exception as e:
# logger.error(f"LLM error: {e}")
# raise Exception(f"LLM error: {e}")
# raise Exception(f"LLM error: {e}") from e
Loading (0%) …
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment