Commit bbfa83a3 authored by Arash Mosharraf

removed the test notebook

parent c261e62c
%% Cell type:code id: tags:
``` python
import sys
import os

# resolve the repo root (the parent of this notebook's directory)
current_script_path = os.getcwd()
parent_dir = os.path.dirname(current_script_path)
print(parent_dir)

# add the repo root to the system path so the local semantic_router package is importable
sys.path.insert(0, parent_dir)

from semantic_router import Route
from semantic_router.encoders import CohereEncoder, OpenAIEncoder, AzureOpenAIEncoder

politics = Route(
    name="politics",
    utterances=[
        "isn't politics the best thing ever",
        "why don't you tell me about your political opinions",
        "don't you just love the president",
        "don't you just hate the president",
        "they're going to destroy this country!",
        "they will save the country!",
    ],
)
```
%% Output
c:\Users\armoshar\OneDrive - Microsoft\Projects\OpenAI\semantic-router
%% Cell type:code id: tags:
``` python
from dotenv import load_dotenv
import os

load_dotenv()

api_key = os.getenv("AZURE_OPENAI_API_KEY")
azure_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
deployment = os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME")  # loaded here but not passed to the encoder below

encoder = AzureOpenAIEncoder(
    api_key=api_key,
    azure_endpoint=azure_endpoint,
    api_version="2023-07-01-preview",
    model="text-embedding-ada-002",
)
```
%% Cell type:code id: tags:
``` python
def get_direction(start: str) -> str:
    """just produce a direction from the starting point to the library
    :param start: the starting address
    :type start: str

    :return: the direction
    """
    return "From main street to the library: go left and right and you get there in 5 minutes"


def do_fuzzy_case() -> str:
    """Handle the fuzzy case questions

    return: the text
    """
    return "I need more information to help you with that"


def do_irrelevant() -> str:
    """Handle the irrelevant questions

    return: the text
    """
    return "I don't know how to help you with that"
```
%% Cell type:code id: tags:
``` python
from semantic_router.utils.function_call import get_schema
direction_schema = get_schema(get_direction)
fuzzy_schema = get_schema(do_fuzzy_case)
irrelevant_schema = get_schema(do_irrelevant)
```
%% Cell type:code id: tags:
``` python
direction_schema
```
%% Output
{'name': 'get_direction',
'description': 'just produce a direction from the starting point to the library\n:param start: the starting address\n:type start: str\n\n\n:return: the direction',
'signature': '(start) -> str',
'output': "<class 'str'>"}
%% Cell type:code id: tags:
``` python
direction = Route(
    name="get_direction",
    utterances=[
        "How do I get to the closest park?",
        "I want to go to the nearest restaurant",
        "I want to go to the nearest restaurant from my location",
        "I want to go to the nearest restaurant from my location to my home",
        "I want to go to a library in the city of Dallas",
        "How do I get to the mall?",
        "find me the best route to the closest Chinese restaurant",
        "How far is the drive from my mom's house to the nearest grocery store",
    ],
    function_schema=direction_schema,
)

fuzzycase = Route(
    name="do_fuzzy_case",
    utterances=[
        "I want to go to the nearest restaurant from my location to my home but I want to stop by the grocery store",
        "How do I get to a different continent from my home",
        "I want to go to the library but I want to stop by the grocery store and then go to the nearest restaurant and come back",
        "City of Austin to Los Angeles",
        "Ski Slopes",
    ],
    function_schema=fuzzy_schema,
)

irrelevant = Route(
    name="do_irrelevant",
    utterances=[
        "How do I make grilled lobster",
        "What is the best book for finding the driving directions",
        "What is the best song",
        "Can you tell me the most popular color in the world",
        "Tell me a joke",
    ],
    function_schema=irrelevant_schema,
)

routes = [direction, fuzzycase, irrelevant]
```
%% Cell type:code id: tags:
``` python
from semantic_router.layer import RouteLayer
from semantic_router.llms import AzureOpenAILLM
llm = AzureOpenAILLM(openai_api_key=api_key, azure_endpoint=azure_endpoint)
rl = RouteLayer(encoder=encoder, routes=routes, llm=llm)
```
%% Output
2024-01-16 15:24:32 INFO semantic_router.utils.logger Initializing RouteLayer
%% Cell type:code id: tags:
``` python
out = rl("how do I get to the nearest gas station from my home?")
```
%% Output
2024-01-16 15:24:33 INFO semantic_router.utils.logger LLM `name='gpt-35-turbo' client=<openai.lib.azure.AzureOpenAI object at 0x000002324D651630> temperature=0.01 max_tokens=200` is chosen
2024-01-16 15:24:33 INFO semantic_router.utils.logger this is the llm passed to route object name='gpt-35-turbo' client=<openai.lib.azure.AzureOpenAI object at 0x000002324D651630> temperature=0.01 max_tokens=200
2024-01-16 15:24:33 INFO semantic_router.utils.logger Extracting function input...
2024-01-16 15:24:33 INFO semantic_router.utils.logger LLM output: {
"start": "my home"
}
2024-01-16 15:24:33 INFO semantic_router.utils.logger Function inputs: {'start': 'my home'}
2024-01-16 15:24:33 INFO semantic_router.utils.logger param info ['start) -> st']
2024-01-16 15:24:33 INFO semantic_router.utils.logger param names ['start) -> st']
2024-01-16 15:24:33 ERROR semantic_router.utils.logger Input validation error: list index out of range
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[8], line 1
----> 1 out = rl("how do I get to the nearest gas station from my home?")
File c:\Users\armoshar\OneDrive - Microsoft\Projects\OpenAI\semantic-router\semantic_router\layer.py:203, in RouteLayer.__call__(self, text)
201 route.llm = self.llm
202 logger.info(f"LLM `{route.llm}` is chosen")
--> 203 return route(text)
204 else:
205 # if no route passes threshold, return empty route choice
206 return RouteChoice()
File c:\Users\armoshar\OneDrive - Microsoft\Projects\OpenAI\semantic-router\semantic_router\route.py:57, in Route.__call__(self, query)
52 raise ValueError(
53 "LLM is required for dynamic routes. Please ensure the `llm` "
54 "attribute is set."
55 )
56 # if a function schema is provided we generate the inputs
---> 57 extracted_inputs = self.llm.extract_function_inputs(
58 query=query, function_schema=self.function_schema
59 )
60 logger.info(f"extracted inputs {extracted_inputs}")
61 func_call = extracted_inputs
File c:\Users\armoshar\OneDrive - Microsoft\Projects\OpenAI\semantic-router\semantic_router\llms\base.py:90, in BaseLLM.extract_function_inputs(self, query, function_schema)
88 logger.info(f"Function inputs: {function_inputs}")
89 if not self._is_valid_inputs(function_inputs, function_schema):
---> 90 raise ValueError("Invalid inputs")
91 return function_inputs
ValueError: Invalid inputs
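%% Cell type:markdown id: tags:
The log lines above show where validation breaks: `direction_schema["signature"]` is `'(start) -> str'`, and the validator appears to strip the first and last character of that string before splitting on commas, yielding `['start) -> st']` (exactly the `param info` logged above); splitting that fragment on `":"` to recover a parameter type then raises `list index out of range`. Below is a minimal reconstruction of that parsing step, based only on the logged strings; the slicing logic is an assumption for illustration, not the library's actual code.
%% Cell type:code id: tags:
``` python
# Hypothetical reconstruction of the failing parse, grounded in the logged
# values above ("param info ['start) -> st']", "list index out of range").
signature = "(start) -> str"  # value recorded in direction_schema

# Stripping the outer characters also eats into the return annotation,
# because the string is "(params) -> return", not just "(params)".
param_info = [info.strip() for info in signature[1:-1].split(",")]
print(param_info)  # ['start) -> st']

# There is no ":" type separator left, so indexing position 1 fails.
try:
    param_type = param_info[0].split(":")[1]
except IndexError as err:
    print(f"IndexError: {err}")  # list index out of range
```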
%% Cell type:code id: tags:
``` python
from pydantic import BaseModel


class Message(BaseModel):
    role: str
    content: str

    def to_openai(self):
        if self.role.lower() not in ["user", "assistant", "system"]:
            raise ValueError("Role must be either 'user', 'assistant' or 'system'")
        return {"role": self.role, "content": self.content}

    def to_cohere(self):
        return {"role": self.role, "message": self.content}

    def to_llamacpp(self):
        return {"role": self.role, "content": self.content}


import json
import logging

query = """
"How do I make grilled lobster",
"What is the best book for finding the driving directions",
"What is the best song",
"Can you tell me the most popular color in the world",
"Tell me a joke",
"""
function_schema = irrelevant_schema

logging.info("Extracting function input...")

prompt = f"""
You are a helpful assistant designed to output JSON.
Given the following function schema
<< {function_schema} >>
and query
<< {query} >>
extract the parameters values from the query, in a valid JSON format.
Example:
Input:
query: "How is the weather in Hawaii right now in International units?"
schema:
{{
"name": "get_weather",
"description": "Useful to get the weather in a specific location",
"signature": "(location: str, degree: str) -> str",
"output": "<class 'str'>",
}}
Result: {{
"location": "London",
"degree": "Celsius",
}}
Input:
query: {query}
schema: {function_schema}
Result:
"""

llm_input = [Message(role="user", content=prompt)]

# NOTE: this assigns the prompt messages themselves rather than an LLM response,
# which is why the string post-processing below fails on the list.
output = llm_input

print(output)

output = output.replace("'", '"').strip().rstrip(",")

function_inputs = json.loads(output)
function_inputs
```
%% Output
[Message(role='user', content='\nYou are a helpful assistant designed to output JSON.\nGiven the following function schema\n<< {\'name\': \'do_irrelevant\', \'description\': \'Handle the irrelevant questions \\n\\nreturn: the text\', \'signature\': \'() -> str\', \'output\': "<class \'str\'>"} >>\nand query\n<< \n "How do I make grilled lobster",\n "What is the best book for finding the driving directions",\n "What is the best song",\n "Can you tell me the most popular color in the world",\n "Tell me a joke", \n >>\nextract the parameters values from the query, in a valid JSON format.\nExample:\nInput:\nquery: "How is the weather in Hawaii right now in International units?"\nschema:\n{\n "name": "get_weather",\n "description": "Useful to get the weather in a specific location",\n "signature": "(location: str, degree: str) -> str",\n "output": "<class \'str\'>",\n}\n\nResult: {\n "location": "London",\n "degree": "Celsius",\n}\n\nInput:\nquery: \n "How do I make grilled lobster",\n "What is the best book for finding the driving directions",\n "What is the best song",\n "Can you tell me the most popular color in the world",\n "Tell me a joke", \n \nschema: {\'name\': \'do_irrelevant\', \'description\': \'Handle the irrelevant questions \\n\\nreturn: the text\', \'signature\': \'() -> str\', \'output\': "<class \'str\'>"}\nResult:\n')]
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
Cell In[15], line 60
57 output = llm_input
59 print(output)
---> 60 output = output.replace("'", '"').strip().rstrip(",")
62 function_inputs = json.loads(output)
64 function_inputs
AttributeError: 'list' object has no attribute 'replace'
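%% Cell type:markdown id: tags:
The `AttributeError` comes from `output = llm_input`: at that point the variable still holds the list of prompt `Message` objects rather than the model's reply, so the string cleanup has nothing to operate on. The missing step is presumably a call to the Azure OpenAI LLM created earlier; here is a hedged sketch of that flow, assuming the `AzureOpenAILLM` wrapper is callable with a list of messages (as semantic-router's LLM wrappers generally are).
%% Cell type:code id: tags:
``` python
# Sketch only (not part of the recorded run): send the prompt to the LLM
# defined earlier, then clean and parse its text reply as JSON.
completion = llm([Message(role="user", content=prompt)])  # assumed to return the reply as a string

cleaned = completion.replace("'", '"').strip().rstrip(",")
function_inputs = json.loads(cleaned)
print(function_inputs)
```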