Unverified Commit 1fc9df9c authored by Simonas, committed by GitHub

Merge pull request #75 from aurelio-labs/simonas/doc-update-for-semantic-routerdocsexamples-function_callingipynb

feat: Updated function calling example
parents f5034522 6572f3fa
@@ -17,4 +17,5 @@ mac.env
 .coverage
 .coverage.*
 .pytest_cache
-test.py
\ No newline at end of file
+test.py
+output
 from semantic_router.encoders.base import BaseEncoder
 from semantic_router.encoders.bm25 import BM25Encoder
 from semantic_router.encoders.cohere import CohereEncoder
-from semantic_router.encoders.openai import OpenAIEncoder
 from semantic_router.encoders.fastembed import FastEmbedEncoder
+from semantic_router.encoders.openai import OpenAIEncoder

 __all__ = [
     "BaseEncoder",
...
 from typing import Any, List, Optional
 import numpy as np
 from pydantic import BaseModel, PrivateAttr
...
@@ -107,15 +107,24 @@ class LayerConfig:
         """Save the routes to a file in JSON or YAML format"""
         logger.info(f"Saving route config to {path}")
         _, ext = os.path.splitext(path)
+
+        # Check file extension before creating directories or files
+        if ext not in [".json", ".yaml", ".yml"]:
+            raise ValueError(
+                "Unsupported file type. Only .json and .yaml are supported"
+            )
+
+        dir_name = os.path.dirname(path)
+
+        # Create the directory if it doesn't exist and dir_name is not an empty string
+        if dir_name and not os.path.exists(dir_name):
+            os.makedirs(dir_name)
+
         with open(path, "w") as f:
             if ext == ".json":
                 json.dump(self.to_dict(), f, indent=4)
             elif ext in [".yaml", ".yml"]:
                 yaml.safe_dump(self.to_dict(), f)
-            else:
-                raise ValueError(
-                    "Unsupported file type. Only .json and .yaml are supported"
-                )

     def add(self, route: Route):
         self.routes.append(route)
...
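With the validation hoisted above the filesystem work, an unsupported extension now fails before any directory or file is created, and missing parent directories are created on demand. A minimal sketch of the new behaviour, assuming the to_json helper exercised in the tests further down routes through this save method; `encoder` and `routes` are placeholders:

from semantic_router import RouteLayer  # top-level import path assumed

route_layer = RouteLayer(encoder=encoder, routes=routes)  # placeholders

# Parent directories are now created automatically before writing.
route_layer.to_json("configs/nested/route_config.json")

# An unsupported extension raises ValueError before anything touches disk.
route_layer.to_json("route_config.txt")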
@@ -70,6 +70,7 @@ class Route(BaseModel):
         """
         schema = function_call.get_schema(item=entity)
         dynamic_route = cls._generate_dynamic_route(function_schema=schema)
+        dynamic_route.function_schema = schema
         return dynamic_route

     @classmethod
...
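Storing the schema on the returned route means a dynamic route carries everything needed to build the function call later. A hypothetical sketch; the enclosing classmethod's public name is not visible in this hunk, so from_dynamic_route is an assumption:

from semantic_router import Route  # import path assumed

def get_time(timezone: str) -> str:
    """Finds the current time in a specific timezone."""
    ...

route = Route.from_dynamic_route(entity=get_time)  # classmethod name assumed
assert route.function_schema is not None  # the schema now travels with the route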
@@ -8,7 +8,6 @@ from semantic_router.encoders import (
     CohereEncoder,
     OpenAIEncoder,
 )
-from semantic_router.utils.splitters import semantic_splitter
...
@@ -4,6 +4,7 @@ from typing import Any, Callable, Union
 from pydantic import BaseModel

+from semantic_router.schema import RouteChoice
 from semantic_router.utils.llm import llm
 from semantic_router.utils.logger import logger
@@ -105,23 +106,14 @@ def is_valid_inputs(inputs: dict[str, Any], function_schema: dict[str, Any]) ->
     return False


-def call_function(function: Callable, inputs: dict[str, str]):
-    try:
-        return function(**inputs)
-    except TypeError as e:
-        logger.error(f"Error calling function: {e}")
-
-
 # TODO: Add route layer object to the input, solve circular import issue
-async def route_and_execute(query: str, functions: list[Callable], route_layer):
-    function_name = route_layer(query)
-    if not function_name:
-        logger.warning("No function found, calling LLM...")
-        return llm(query)
+async def route_and_execute(query: str, functions: list[Callable], layer) -> Any:
+    route_choice: RouteChoice = layer(query)
+
     for function in functions:
-        if function.__name__ == function_name:
-            print(f"Calling function: {function.__name__}")
-            schema = get_schema(function)
-            inputs = extract_function_inputs(query, schema)
-            call_function(function, inputs)
+        if function.__name__ == route_choice.name:
+            if route_choice.function_call:
+                return function(**route_choice.function_call)
+
+    logger.warning("No function found, calling LLM.")
+    return llm(query)
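The layer now performs both route selection and input extraction, returning a RouteChoice whose function_call holds ready-to-use keyword arguments; that is why call_function, get_schema, and extract_function_inputs drop out of this helper. A hypothetical end-to-end call, with the function and query invented for illustration:

import asyncio

def get_time(timezone: str) -> str:
    """Finds the current time in a specific timezone."""
    ...

async def main():
    # `route_layer` is a RouteLayer whose __call__ returns a RouteChoice.
    result = await route_and_execute(
        query="What is the time in Rome right now?",
        functions=[get_time],
        layer=route_layer,
    )
    print(result)

asyncio.run(main())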
 import numpy as np
+from semantic_router.encoders import BaseEncoder
...
@@ -13,13 +14,15 @@ def semantic_splitter(
     Method 1: "consecutive_similarity_drop" - This method splits documents based on
     the changes in similarity scores between consecutive documents.
-    Method 2: "cumulative_similarity_drop" - This method segments the documents based on the
-    changes in cumulative similarity score of the documents within the same split.
+    Method 2: "cumulative_similarity_drop" - This method segments the documents based
+    on the changes in cumulative similarity score of the documents within the same
+    split.

     Args:
         encoder (BaseEncoder): Encoder for document embeddings.
         docs (list[str]): Documents to split.
-        threshold (float): The similarity drop value that will trigger a new document split.
+        threshold (float): The similarity drop value that will trigger a new document
+            split.
         split_method (str): The method to use for splitting.

     Returns:
...
@@ -64,7 +67,8 @@ def semantic_splitter(
     else:
         raise ValueError(
-            "Invalid 'split_method'. Choose either 'consecutive_similarity_drop' or 'cumulative_similarity_drop'."
+            "Invalid 'split_method'. Choose either 'consecutive_similarity_drop' or"
+            " 'cumulative_similarity_drop'."
         )

     splits[f"split {curr_split_num}"] = docs[curr_split_start_idx:]
...
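For reference, a hypothetical call matching the documented signature; the encoder choice, documents, and threshold are illustrative, not values from the source:

from semantic_router.encoders import OpenAIEncoder
from semantic_router.utils.splitters import semantic_splitter

encoder = OpenAIEncoder()  # any BaseEncoder subclass should work
docs = [
    "Our billing cycle runs monthly.",
    "Invoices are issued on the first of the month.",
    "Shipping usually takes three to five days.",
]
splits = semantic_splitter(
    encoder=encoder,
    docs=docs,
    threshold=0.5,
    split_method="consecutive_similarity_drop",
)
# Returns a dict keyed "split 1", "split 2", ... as in the hunk above.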
 import os
+import tempfile
 from unittest.mock import mock_open, patch

 import pytest
...
@@ -175,28 +176,30 @@ class TestRouteLayer:
         assert route_layer.score_threshold == 0.82

     def test_json(self, openai_encoder, routes):
-        os.environ["OPENAI_API_KEY"] = "test_api_key"
-        route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
-        route_layer.to_json("test_output.json")
-        assert os.path.exists("test_output.json")
-        route_layer_from_file = RouteLayer.from_json("test_output.json")
-        assert (
-            route_layer_from_file.index is not None
-            and route_layer_from_file.categories is not None
-        )
-        os.remove("test_output.json")
+        with tempfile.NamedTemporaryFile(suffix=".yaml", delete=False) as temp:
+            os.environ["OPENAI_API_KEY"] = "test_api_key"
+            route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
+            route_layer.to_json(temp.name)
+            assert os.path.exists(temp.name)
+            route_layer_from_file = RouteLayer.from_json(temp.name)
+            assert (
+                route_layer_from_file.index is not None
+                and route_layer_from_file.categories is not None
+            )
+            os.remove(temp.name)

     def test_yaml(self, openai_encoder, routes):
-        os.environ["OPENAI_API_KEY"] = "test_api_key"
-        route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
-        route_layer.to_yaml("test_output.yaml")
-        assert os.path.exists("test_output.yaml")
-        route_layer_from_file = RouteLayer.from_yaml("test_output.yaml")
-        assert (
-            route_layer_from_file.index is not None
-            and route_layer_from_file.categories is not None
-        )
-        os.remove("test_output.yaml")
+        with tempfile.NamedTemporaryFile(suffix=".yaml", delete=False) as temp:
+            os.environ["OPENAI_API_KEY"] = "test_api_key"
+            route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
+            route_layer.to_yaml(temp.name)
+            assert os.path.exists(temp.name)
+            route_layer_from_file = RouteLayer.from_yaml(temp.name)
+            assert (
+                route_layer_from_file.index is not None
+                and route_layer_from_file.categories is not None
+            )
+            os.remove(temp.name)

     def test_config(self, openai_encoder, routes):
         os.environ["OPENAI_API_KEY"] = "test_api_key"
...
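Both tests now use the same pattern: delete=False stops the temporary file from vanishing when the handle closes, so the path can be written to and reopened by name, with the test removing it explicitly afterwards. A minimal sketch of the pattern:

import os
import tempfile

with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as temp:
    path = temp.name
    # write to and re-read `path` here by name
os.remove(path)  # explicit cleanup, since delete=False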
-import pytest
 from unittest.mock import Mock
-from semantic_router.utils.splitters import semantic_splitter
+import pytest
 from semantic_router.schema import Conversation, Message
+from semantic_router.utils.splitters import semantic_splitter

 def test_semantic_splitter_consecutive_similarity_drop():
...