diff --git a/docs/00-introduction.ipynb b/docs/00-introduction.ipynb
index 2dfb4e81bf73e8aa7c5f969b03b64320b57c5cbe..437c6ec528fd5bf232ba24cdc9762d2b861a10b9 100644
--- a/docs/00-introduction.ipynb
+++ b/docs/00-introduction.ipynb
@@ -41,7 +41,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!pip install -qU semantic-router==0.0.16"
+    "!pip install -qU semantic-router==0.0.17"
    ]
   },
   {
diff --git a/docs/01-save-load-from-file.ipynb b/docs/01-save-load-from-file.ipynb
index 062474be79bffc91f236b19119520ed87f7b1cfe..b00897b8e0b4dec58959d803754da3e8a7569a25 100644
--- a/docs/01-save-load-from-file.ipynb
+++ b/docs/01-save-load-from-file.ipynb
@@ -36,7 +36,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!pip install -qU semantic-router==0.0.16"
+    "!pip install -qU semantic-router==0.0.17"
    ]
   },
   {
diff --git a/docs/02-dynamic-routes.ipynb b/docs/02-dynamic-routes.ipynb
index 72c8a3ec44c7e0d24df7a59c90787868bb25d9d8..7507b8c0cdd64e6ac584139467e1ebcfb87176d5 100644
--- a/docs/02-dynamic-routes.ipynb
+++ b/docs/02-dynamic-routes.ipynb
@@ -48,7 +48,7 @@
    },
    "outputs": [],
    "source": [
-    "!pip install -qU semantic-router==0.0.16"
+    "!pip install -qU semantic-router==0.0.17"
    ]
   },
   {
diff --git a/docs/03-basic-langchain-agent.ipynb b/docs/03-basic-langchain-agent.ipynb
index 6eac40982bf757aa11697b85ca3175281c67d55f..54c6fdd087d3781161b1170651f86c3db7913b22 100644
--- a/docs/03-basic-langchain-agent.ipynb
+++ b/docs/03-basic-langchain-agent.ipynb
@@ -78,7 +78,7 @@
    ],
    "source": [
     "!pip install -qU \\\n",
-    " semantic-router==0.0.16 \\\n",
+    " semantic-router==0.0.17 \\\n",
     " langchain==0.0.352 \\\n",
     " openai==1.6.1"
    ]
diff --git a/docs/05-local-execution.ipynb b/docs/05-local-execution.ipynb
index cdb6c3feb3645e8640a91d76855d5f6523b0b549..38ee55544782dbb7c9b3b15d02953d2fbf12fa3c 100644
--- a/docs/05-local-execution.ipynb
+++ b/docs/05-local-execution.ipynb
@@ -41,7 +41,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!pip install -qU \"semantic-router[local]==0.0.16\""
+    "!pip install -qU \"semantic-router[local]==0.0.17\""
    ]
   },
   {
diff --git a/poetry.lock b/poetry.lock
index 9fa1d5474365f1acfce16e50a8ea20a57d42b0ec..4ff76b724aca26d398a26a3aae78d32699282589 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1058,13 +1058,13 @@ files = [
 
 [[package]]
 name = "ipykernel"
-version = "6.28.0"
+version = "6.29.0"
 description = "IPython Kernel for Jupyter"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "ipykernel-6.28.0-py3-none-any.whl", hash = "sha256:c6e9a9c63a7f4095c0a22a79f765f079f9ec7be4f2430a898ddea889e8665661"},
-    {file = "ipykernel-6.28.0.tar.gz", hash = "sha256:69c11403d26de69df02225916f916b37ea4b9af417da0a8c827f84328d88e5f3"},
+    {file = "ipykernel-6.29.0-py3-none-any.whl", hash = "sha256:076663ca68492576f051e4af7720d33f34383e655f2be0d544c8b1c9de915b2f"},
+    {file = "ipykernel-6.29.0.tar.gz", hash = "sha256:b5dd3013cab7b330df712891c96cd1ab868c27a7159e606f762015e9bf8ceb3f"},
 ]
 
 [package.dependencies]
@@ -1087,7 +1087,7 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"]
 docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"]
 pyqt5 = ["pyqt5"]
 pyside6 = ["pyside6"]
-test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"]
+test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (==0.23.2)", "pytest-cov", "pytest-timeout"]
 
 [[package]]
 name = "ipython"
@@ -1846,13 +1846,13 @@ sympy = "*"
 
 [[package]]
 name = "openai"
-version = "1.7.2"
+version = "1.8.0"
 description = "The official Python library for the openai API"
 optional = false
 python-versions = ">=3.7.1"
 files = [
-    {file = "openai-1.7.2-py3-none-any.whl", hash = "sha256:8f41b90a762f5fd9d182b45851041386fed94c8ad240a70abefee61a68e0ef53"},
-    {file = "openai-1.7.2.tar.gz", hash = "sha256:c73c78878258b07f1b468b0602c6591f25a1478f49ecb90b9bd44b7cc80bce73"},
+    {file = "openai-1.8.0-py3-none-any.whl", hash = "sha256:0f8f53805826103fdd8adaf379ad3ec23f9d867e698cbc14caf34b778d150175"},
+    {file = "openai-1.8.0.tar.gz", hash = "sha256:93366be27802f517e89328801913d2a5ede45e3b86fdcab420385b8a1b88c767"},
 ]
 
 [package.dependencies]
diff --git a/pyproject.toml b/pyproject.toml
index 63d849cea0e3c501f86013bf79b18333f42866db..84acdcb76ecea3c67dcccdd9846e60d0e59e2e15 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "semantic-router"
-version = "0.0.16"
+version = "0.0.17"
 description = "Super fast semantic router for AI decision making"
 authors = [
     "James Briggs <james@aurelio.ai>",
diff --git a/semantic_router/__init__.py b/semantic_router/__init__.py
index 1c604af8065f9b2e1519e6f92daf7af2739d584b..fbf86ef7b6970cf67d102f4c5041a44fa95a9f94 100644
--- a/semantic_router/__init__.py
+++ b/semantic_router/__init__.py
@@ -3,3 +3,5 @@ from semantic_router.layer import LayerConfig, RouteLayer
 from semantic_router.route import Route
 
 __all__ = ["RouteLayer", "HybridRouteLayer", "Route", "LayerConfig"]
+
+__version__ = "0.0.17"
diff --git a/semantic_router/encoders/cohere.py b/semantic_router/encoders/cohere.py
index 4f108095473467c895c5bf5112c35ddba29190c0..f534fa51cb3524b047416260ff688fd9b9769bee 100644
--- a/semantic_router/encoders/cohere.py
+++ b/semantic_router/encoders/cohere.py
@@ -25,7 +25,9 @@
         try:
             self.client = cohere.Client(cohere_api_key)
         except Exception as e:
-            raise ValueError(f"Cohere API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"Cohere API client failed to initialize. Error: {e}"
+            ) from e
 
     def __call__(self, docs: List[str]) -> List[List[float]]:
         if self.client is None:
@@ -34,4 +36,4 @@
             embeds = self.client.embed(docs, input_type="search_query", model=self.name)
             return embeds.embeddings
         except Exception as e:
-            raise ValueError(f"Cohere API call failed. Error: {e}")
+            raise ValueError(f"Cohere API call failed. Error: {e}") from e
diff --git a/semantic_router/encoders/fastembed.py b/semantic_router/encoders/fastembed.py
index d227d43affeee7bf88c6f6a8a15ea8b5db0aa3f0..c540d17f9b48bf33c0d2ddd1c310f8a40896786f 100644
--- a/semantic_router/encoders/fastembed.py
+++ b/semantic_router/encoders/fastembed.py
@@ -48,4 +48,4 @@
             embeddings: List[List[float]] = [e.tolist() for e in embeds]
             return embeddings
         except Exception as e:
-            raise ValueError(f"FastEmbed embed failed. Error: {e}")
+            raise ValueError(f"FastEmbed embed failed. Error: {e}") from e
diff --git a/semantic_router/encoders/openai.py b/semantic_router/encoders/openai.py
index 761f493102790e7e79d33b0d5a4731f6fa0e7154..ce11251b7256e611fab1f7f415c3db44b6ed3fcc 100644
--- a/semantic_router/encoders/openai.py
+++ b/semantic_router/encoders/openai.py
@@ -29,7 +29,9 @@
         try:
             self.client = openai.Client(api_key=api_key)
         except Exception as e:
-            raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"OpenAI API client failed to initialize. Error: {e}"
+            ) from e
 
     def __call__(self, docs: List[str]) -> List[List[float]]:
         if self.client is None:
@@ -49,7 +51,7 @@
                 logger.warning(f"Retrying in {2**j} seconds...")
             except Exception as e:
                 logger.error(f"OpenAI API call failed. Error: {error_message}")
-                raise ValueError(f"OpenAI API call failed. Error: {e}")
+                raise ValueError(f"OpenAI API call failed. Error: {e}") from e
 
         if (
             not embeds
diff --git a/semantic_router/encoders/zure.py b/semantic_router/encoders/zure.py
index 8a558d0dc4b860c59da0d2e80a6536b618083b6f..b53fb66259ba38879827594dc7fdd6b106eb9409 100644
--- a/semantic_router/encoders/zure.py
+++ b/semantic_router/encoders/zure.py
@@ -74,7 +74,9 @@
                 # _strict_response_validation=True,
             )
         except Exception as e:
-            raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"OpenAI API client failed to initialize. Error: {e}"
+            ) from e
 
     def __call__(self, docs: List[str]) -> List[List[float]]:
         if self.client is None:
@@ -100,7 +102,7 @@
                 logger.warning(f"Retrying in {2**j} seconds...")
             except Exception as e:
                 logger.error(f"Azure OpenAI API call failed. Error: {error_message}")
-                raise ValueError(f"Azure OpenAI API call failed. Error: {e}")
+                raise ValueError(f"Azure OpenAI API call failed. Error: {e}") from e
 
         if (
             not embeds
diff --git a/semantic_router/llms/cohere.py b/semantic_router/llms/cohere.py
index 1a3c8e3c7741cb83232e6115ea547aaee8a93015..37eb4338f262f5f1fcf271cb35c3102bd9c98187 100644
--- a/semantic_router/llms/cohere.py
+++ b/semantic_router/llms/cohere.py
@@ -24,7 +24,9 @@
         try:
             self.client = cohere.Client(cohere_api_key)
         except Exception as e:
-            raise ValueError(f"Cohere API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"Cohere API client failed to initialize. Error: {e}"
+            ) from e
 
     def __call__(self, messages: List[Message]) -> str:
         if self.client is None:
@@ -43,4 +45,4 @@
 
             return output
         except Exception as e:
-            raise ValueError(f"Cohere API call failed. Error: {e}")
+            raise ValueError(f"Cohere API call failed. Error: {e}") from e
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index b0c7d0e69ce22b5a90f57e42d2c3ada235e221db..a93ce0e7ab2747fc77e537e485b7699779919e04 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -29,7 +29,9 @@
         try:
             self.client = openai.OpenAI(api_key=api_key)
         except Exception as e:
-            raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"OpenAI API client failed to initialize. Error: {e}"
+            ) from e
         self.temperature = temperature
         self.max_tokens = max_tokens
 
@@ -51,4 +53,4 @@
             return output
         except Exception as e:
             logger.error(f"LLM error: {e}")
-            raise Exception(f"LLM error: {e}")
+            raise Exception(f"LLM error: {e}") from e
diff --git a/semantic_router/llms/openrouter.py b/semantic_router/llms/openrouter.py
index 4e687207ba206d38ac5d7b831214baeb81a386d4..b00d68a4730c6bf681c0ba4d90a5a79c7febe603 100644
--- a/semantic_router/llms/openrouter.py
+++ b/semantic_router/llms/openrouter.py
@@ -34,7 +34,9 @@
         try:
             self.client = openai.OpenAI(api_key=api_key, base_url=self.base_url)
         except Exception as e:
-            raise ValueError(f"OpenRouter API client failed to initialize. Error: {e}")
+            raise ValueError(
+                f"OpenRouter API client failed to initialize. Error: {e}"
+            ) from e
         self.temperature = temperature
         self.max_tokens = max_tokens
 
@@ -56,4 +58,4 @@
             return output
         except Exception as e:
             logger.error(f"LLM error: {e}")
-            raise Exception(f"LLM error: {e}")
+            raise Exception(f"LLM error: {e}") from e
diff --git a/semantic_router/utils/llm.py b/semantic_router/utils/llm.py
index 4f89566f79df700ec3ae8103b5b8cdfd84700627..5402e47fe27f8695447ab1c5fcc01fd4b8080729 100644
--- a/semantic_router/utils/llm.py
+++ b/semantic_router/utils/llm.py
@@ -32,7 +32,7 @@ def llm(prompt: str) -> Optional[str]:
         return output
     except Exception as e:
         logger.error(f"LLM error: {e}")
-        raise Exception(f"LLM error: {e}")
+        raise Exception(f"LLM error: {e}") from e
 
 
 # TODO integrate async LLM function
@@ -62,4 +62,4 @@ def llm(prompt: str) -> Optional[str]:
 #         return output
 #     except Exception as e:
 #         logger.error(f"LLM error: {e}")
-#         raise Exception(f"LLM error: {e}")
+#         raise Exception(f"LLM error: {e}") from e