diff --git a/semantic_router/encoders/cohere.py b/semantic_router/encoders/cohere.py
index 4f108095473467c895c5bf5112c35ddba29190c0..f534fa51cb3524b047416260ff688fd9b9769bee 100644
--- a/semantic_router/encoders/cohere.py
+++ b/semantic_router/encoders/cohere.py
@@ -25,7 +25,9 @@ class CohereEncoder(BaseEncoder):
         try:
             self.client = cohere.Client(cohere_api_key)
         except Exception as e:
-            raise ValueError(f"Cohere API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"Cohere API client failed to initialize. Error: {e}"
+            ) from e
 
     def __call__(self, docs: List[str]) -> List[List[float]]:
         if self.client is None:
@@ -34,4 +36,4 @@ class CohereEncoder(BaseEncoder):
             embeds = self.client.embed(docs, input_type="search_query", model=self.name)
             return embeds.embeddings
         except Exception as e:
-            raise ValueError(f"Cohere API call failed. Error: {e}")
+            raise ValueError(f"Cohere API call failed. Error: {e}") from e
diff --git a/semantic_router/encoders/fastembed.py b/semantic_router/encoders/fastembed.py
index 018eeee17334aed89db5f6675eb0dbeb409d4448..2b9083691562d19b5006aa2e62b97c1f38de2224 100644
--- a/semantic_router/encoders/fastembed.py
+++ b/semantic_router/encoders/fastembed.py
@@ -48,4 +48,4 @@ class FastEmbedEncoder(BaseEncoder):
             embeddings: List[List[float]] = [e.tolist() for e in embeds]
             return embeddings
         except Exception as e:
-            raise ValueError(f"FastEmbed embed failed. Error: {e}")
+            raise ValueError(f"FastEmbed embed failed. Error: {e}") from e
diff --git a/semantic_router/encoders/openai.py b/semantic_router/encoders/openai.py
index 761f493102790e7e79d33b0d5a4731f6fa0e7154..ce11251b7256e611fab1f7f415c3db44b6ed3fcc 100644
--- a/semantic_router/encoders/openai.py
+++ b/semantic_router/encoders/openai.py
@@ -29,7 +29,9 @@ class OpenAIEncoder(BaseEncoder):
         try:
             self.client = openai.Client(api_key=api_key)
         except Exception as e:
-            raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"OpenAI API client failed to initialize. Error: {e}"
+            ) from e
 
     def __call__(self, docs: List[str]) -> List[List[float]]:
         if self.client is None:
@@ -49,7 +51,7 @@ class OpenAIEncoder(BaseEncoder):
                 logger.warning(f"Retrying in {2**j} seconds...")
             except Exception as e:
                 logger.error(f"OpenAI API call failed. Error: {error_message}")
-                raise ValueError(f"OpenAI API call failed. Error: {e}")
+                raise ValueError(f"OpenAI API call failed. Error: {e}") from e
 
         if (
             not embeds
diff --git a/semantic_router/encoders/zure.py b/semantic_router/encoders/zure.py
index 8a558d0dc4b860c59da0d2e80a6536b618083b6f..b53fb66259ba38879827594dc7fdd6b106eb9409 100644
--- a/semantic_router/encoders/zure.py
+++ b/semantic_router/encoders/zure.py
@@ -74,7 +74,9 @@ class AzureOpenAIEncoder(BaseEncoder):
                 # _strict_response_validation=True,
             )
         except Exception as e:
-            raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"OpenAI API client failed to initialize. Error: {e}"
+            ) from e
 
     def __call__(self, docs: List[str]) -> List[List[float]]:
         if self.client is None:
@@ -100,7 +102,7 @@ class AzureOpenAIEncoder(BaseEncoder):
                 logger.warning(f"Retrying in {2**j} seconds...")
             except Exception as e:
                 logger.error(f"Azure OpenAI API call failed. Error: {error_message}")
-                raise ValueError(f"Azure OpenAI API call failed. Error: {e}")
+                raise ValueError(f"Azure OpenAI API call failed. Error: {e}") from e
 
         if (
             not embeds
diff --git a/semantic_router/llms/cohere.py b/semantic_router/llms/cohere.py
index 1a3c8e3c7741cb83232e6115ea547aaee8a93015..37eb4338f262f5f1fcf271cb35c3102bd9c98187 100644
--- a/semantic_router/llms/cohere.py
+++ b/semantic_router/llms/cohere.py
@@ -24,7 +24,9 @@ class CohereLLM(BaseLLM):
         try:
             self.client = cohere.Client(cohere_api_key)
         except Exception as e:
-            raise ValueError(f"Cohere API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"Cohere API client failed to initialize. Error: {e}"
+            ) from e
 
     def __call__(self, messages: List[Message]) -> str:
         if self.client is None:
@@ -43,4 +45,4 @@ class CohereLLM(BaseLLM):
             return output
 
         except Exception as e:
-            raise ValueError(f"Cohere API call failed. Error: {e}")
+            raise ValueError(f"Cohere API call failed. Error: {e}") from e
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index b0c7d0e69ce22b5a90f57e42d2c3ada235e221db..a93ce0e7ab2747fc77e537e485b7699779919e04 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -29,7 +29,9 @@ class OpenAILLM(BaseLLM):
         try:
             self.client = openai.OpenAI(api_key=api_key)
         except Exception as e:
-            raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"OpenAI API client failed to initialize. Error: {e}"
+            ) from e
         self.temperature = temperature
         self.max_tokens = max_tokens
 
@@ -51,4 +53,4 @@ class OpenAILLM(BaseLLM):
             return output
         except Exception as e:
             logger.error(f"LLM error: {e}")
-            raise Exception(f"LLM error: {e}")
+            raise Exception(f"LLM error: {e}") from e
diff --git a/semantic_router/llms/openrouter.py b/semantic_router/llms/openrouter.py
index 4e687207ba206d38ac5d7b831214baeb81a386d4..b00d68a4730c6bf681c0ba4d90a5a79c7febe603 100644
--- a/semantic_router/llms/openrouter.py
+++ b/semantic_router/llms/openrouter.py
@@ -34,7 +34,9 @@ class OpenRouterLLM(BaseLLM):
         try:
             self.client = openai.OpenAI(api_key=api_key, base_url=self.base_url)
         except Exception as e:
-            raise ValueError(f"OpenRouter API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"OpenRouter API client failed to initialize. Error: {e}"
+            ) from e
         self.temperature = temperature
         self.max_tokens = max_tokens
 
@@ -56,4 +58,4 @@ class OpenRouterLLM(BaseLLM):
             return output
         except Exception as e:
             logger.error(f"LLM error: {e}")
-            raise Exception(f"LLM error: {e}")
+            raise Exception(f"LLM error: {e}") from e
diff --git a/semantic_router/utils/llm.py b/semantic_router/utils/llm.py
index 4f89566f79df700ec3ae8103b5b8cdfd84700627..5402e47fe27f8695447ab1c5fcc01fd4b8080729 100644
--- a/semantic_router/utils/llm.py
+++ b/semantic_router/utils/llm.py
@@ -32,7 +32,7 @@ def llm(prompt: str) -> Optional[str]:
         return output
     except Exception as e:
         logger.error(f"LLM error: {e}")
-        raise Exception(f"LLM error: {e}")
+        raise Exception(f"LLM error: {e}") from e
 
 
 # TODO integrate async LLM function
@@ -62,4 +62,4 @@ def llm(prompt: str) -> Optional[str]:
 #         return output
 #     except Exception as e:
 #         logger.error(f"LLM error: {e}")
-#         raise Exception(f"LLM error: {e}")
+#         raise Exception(f"LLM error: {e}") from e