Skip to content
Snippets Groups Projects
Commit a1370a5f authored by Simonas's avatar Simonas
Browse files

working function calling with Mistral

parent c85f0c91
No related branches found
No related tags found
No related merge requests found
%% Cell type:code id: tags:
``` python
# https://platform.openai.com/docs/guides/function-calling
```
%% Cell type:markdown id: tags:
## Define LLMs
%% Cell type:code id: tags:
``` python
# OpenAI
import openai
from semantic_router.utils.logger import logger
def llm_openai(prompt: str, model: str = "gpt-4") -> str:
    """Send *prompt* as a system message to an OpenAI chat model and return the reply.

    Args:
        prompt: Text placed in the single "system" message of the request.
        model: OpenAI chat model name (defaults to "gpt-4").

    Returns:
        The assistant's message content.

    Raises:
        Exception: "Failed to call OpenAI API" when the API call itself fails,
            or "AI message is empty" when the model returns no content.
    """
    # BUG FIX: the try block used to also enclose the empty-message check, so
    # the deliberately raised "AI message is empty" exception was caught and
    # re-wrapped as "Failed to call OpenAI API". Only the API call is guarded now.
    try:
        response = openai.chat.completions.create(
            model=model,
            messages=[
                {"role": "system", "content": f"{prompt}"},
            ],
        )
    except Exception as e:
        raise Exception("Failed to call OpenAI API", e)
    ai_message = response.choices[0].message.content
    if not ai_message:
        raise Exception("AI message is empty", ai_message)
    logger.info(f"AI message: {ai_message}")
    return ai_message
```
%% Cell type:code id: tags:
``` python
# Mistral
import os
import requests
# Docs https://huggingface.co/docs/transformers/main_classes/text_generation
HF_API_TOKEN = os.environ["HF_API_TOKEN"]
def llm_mistral(prompt: str) -> str:
    """Generate text for *prompt* via a Mistral model on a HuggingFace endpoint.

    Generation parameter docs:
    https://huggingface.co/docs/transformers/main_classes/text_generation

    Args:
        prompt: Raw prompt sent as the "inputs" field.

    Returns:
        The "generated_text" of the first result in the response.

    Raises:
        Exception: "Failed to call HuggingFace API" on any non-200 response.
        requests.Timeout: if the endpoint does not answer within the timeout.
    """
    api_url = "https://z5t4cuhg21uxfmc3.us-east-1.aws.endpoints.huggingface.cloud/"
    headers = {
        "Authorization": f"Bearer {HF_API_TOKEN}",
        "Content-Type": "application/json",
    }
    response = requests.post(
        api_url,
        headers=headers,
        json={
            "inputs": prompt,
            "parameters": {
                "max_new_tokens": 200,
                "temperature": 0.2,
            },
        },
        # BUG FIX: no timeout was set, so a stalled endpoint would hang the
        # caller indefinitely. 60 s is generous for a 200-token completion.
        timeout=60,
    )
    if response.status_code != 200:
        raise Exception("Failed to call HuggingFace API", response.text)
    return response.json()[0]['generated_text']
```
%% Cell type:markdown id: tags:
### Now we need to generate config from function specification with `GPT-4`
%% Cell type:code id: tags:
``` python
import json
from semantic_router.utils.logger import logger
def generate_config(specification: dict) -> dict:
    """Ask GPT-4 to turn an OpenAI function specification into a route config.

    The model is shown one worked example (a weather spec and its route
    config) and asked to emit a JSON object with "name" and "utterances"
    keys for the given *specification*. The function's description is then
    appended as an extra utterance.

    Args:
        specification: OpenAI-style function spec; must contain
            specification["function"]["description"].

    Returns:
        The parsed route config dict.

    Raises:
        Exception: "JSON parsing error" when the model reply is not valid JSON
            (propagates llm_openai failures unchanged).
    """
    logger.info("Generating config...")
    # BUG FIX: this example used to be wrapped in parentheses with a trailing
    # comma, which made it a ONE-ELEMENT TUPLE — a tuple repr (with outer
    # parens) leaked into the prompt. It is now a plain dict.
    example_specification = {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "format": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                        "description": "The temperature unit to use. Infer this "
                        " from the users location.",
                    },
                },
                "required": ["location", "format"],
            },
        },
    }
    example_config = {
        "name": "get_weather",
        "utterances": [
            "What is the weather like in SF?",
            "What is the weather in Cyprus?",
            "weather in London?",
            "Tell me the weather in New York",
            "what is the current weather in Paris?",
        ],
    }
    prompt = f"""
Given the following specification, generate a config in a valid JSON format
enclosed in double quotes,
Example:
SPECIFICATION:
{example_specification}
CONFIG:
{example_config}
GIVEN SPECIFICATION:
{specification}
GENERATED CONFIG:
"""
    ai_message = llm_openai(prompt)
    # Keep the try body minimal: only json.loads can raise JSONDecodeError.
    try:
        route_config = json.loads(ai_message)
    except json.JSONDecodeError as json_error:
        raise Exception("JSON parsing error", json_error)
    function_description = specification["function"]["description"]
    route_config["utterances"].append(function_description)
    logger.info(f"Generated config: {route_config}")
    return route_config
```
%% Cell type:markdown id: tags:
Extract function parameters using the `Mistral` open-source model
%% Cell type:code id: tags:
``` python
def extract_parameters(query: str, specification: dict) -> dict:
    """Use the Mistral endpoint to pull function-call arguments out of *query*.

    The model is shown one worked example (spec + query -> parameters) and
    asked to emit a JSON object of parameter name/value pairs for the given
    *specification* and *query*.

    Args:
        query: Natural-language user query.
        specification: OpenAI-style function spec describing the parameters.

    Returns:
        The parsed parameters dict.

    Raises:
        Exception: "JSON parsing error" when the model reply is not valid JSON
            (propagates llm_mistral failures unchanged).
    """
    logger.info("Extracting parameters...")
    example_query = "what is the weather in London?"
    example_specification = {
        "type": "function",
        "function": {
            "name": "get_time",
            "description": "Get the current time",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "Example of city and state",
                    },
                },
                "required": ["location"],
            },
        },
    }
    example_parameters = {
        "location": "London",
    }
    prompt = f"""
Given the following specification and query, extract the parameters from the query,
in a valid JSON format enclosed in double quotes.
Example:
SPECIFICATION:
{example_specification}
QUERY:
{example_query}
PARAMETERS:
{example_parameters}
GIVEN SPECIFICATION:
{specification}
GIVEN QUERY:
{query}
EXTRACTED PARAMETERS:
"""
    # BUG FIX: a leftover llm_openai(prompt) call ran first and its result was
    # immediately overwritten by llm_mistral(prompt) — a wasted (paid) OpenAI
    # call. Only the Mistral call is kept; the debug print was also removed.
    ai_message = llm_mistral(prompt)
    try:
        parameters = json.loads(ai_message)
    except json.JSONDecodeError as json_error:
        raise Exception("JSON parsing error", json_error)
    logger.info(f"Extracted parameters: {parameters}")
    return parameters
```
%% Cell type:code id: tags:
``` python
def validate_parameters(function_parameters, specification):
    """Check that every parameter the spec marks as required is present.

    Args:
        function_parameters: Mapping of extracted parameter names to values.
        specification: OpenAI-style function spec with a
            ["function"]["parameters"]["required"] list.

    Returns:
        True when all required parameters are supplied.

    Raises:
        ValueError: listing the required parameters that are missing.
    """
    required = specification["function"]["parameters"]["required"]
    missing_params = [name for name in required if name not in function_parameters]
    if missing_params:
        raise ValueError(f"Missing required parameters: {missing_params}")
    return True
```
%% Cell type:markdown id: tags:
Set up the routing layer
%% Cell type:code id: tags:
``` python
from semantic_router.schema import Route
from semantic_router.encoders import CohereEncoder
from semantic_router.layer import RouteLayer
from semantic_router.utils.logger import logger
def get_route_layer(config: list[dict]) -> RouteLayer:
    """Build a RouteLayer from a list of route config dicts.

    Args:
        config: Dicts each carrying a "name" and a "utterances" list.

    Returns:
        A RouteLayer backed by a CohereEncoder over the built routes.
    """
    logger.info("Getting route layer...")
    route_objects = []
    for entry in config:
        route_objects.append(Route(name=entry["name"], utterances=entry["utterances"]))
    return RouteLayer(encoder=CohereEncoder(), routes=route_objects)
```
%% Cell type:code id: tags:
%% Cell type:markdown id: tags:
``` python
def validate_parameters(function_parameters, specification):
    """Ensure all spec-required parameters exist in *function_parameters*.

    Returns True when every required parameter is present; raises ValueError
    naming the missing ones otherwise.
    """
    missing_params = []
    for param in specification["function"]["parameters"]["required"]:
        if param not in function_parameters:
            missing_params.append(param)
    if missing_params:
        raise ValueError(f"Missing required parameters: {missing_params}")
    return True
```
### Workflow
%% Cell type:code id: tags:
``` python
def get_time(location: str) -> str:
    """Stub tool: announce the call and return its own route name."""
    print(f"Calling get_time function with location: {location}")
    return "get_time"


# BUG FIX: this cell was a flattened diff containing BOTH the old lines
# (`specification = {`, `generate_config(specification)`, `if function_name:`)
# and the new ones, so it was not valid Python. Reconstructed to the
# post-commit version, which matches the captured execution output.
# OpenAI-style function specification for the get_time tool.
get_time_spec = {
    "type": "function",
    "function": {
        "name": "get_time",
        "description": "Get the current time",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state",
                },
            },
            "required": ["location"],
        },
    },
}

route_config = generate_config(get_time_spec)
route_layer = get_route_layer([route_config])

queries = [
    "What is the weather like in Barcelona?",
    "What time is it in Taiwan?",
    "What is happening in the world?",
    "what is the time in Kaunas?",
    "Im bored",
    "I want to play a game",
    "Banana",
]

print("Getting function name for queries:\n")
# Route each query; only matched queries get parameter extraction and a call.
for query in queries:
    function_name = route_layer(query)
    function_parameters = {}
    if function_name == "get_time":
        function_parameters = extract_parameters(query, get_time_spec)
    print(query, function_name, function_parameters)
    if function_name == "get_time":
        try:
            if validate_parameters(function_parameters, get_time_spec):
                get_time(**function_parameters)
        except ValueError as e:
            logger.error(f"Error: {e}")
```
%% Output
2023-12-14 13:16:49 INFO semantic_router.utils.logger Generating config...
2023-12-14 13:16:54 INFO semantic_router.utils.logger AI message: {"name": "get_time", "utterances": ["What is the current time in London?", "Tell me the time in New York", "What's happening now in Paris?", "time in San Francisco?", "Tell me the time in Sydney"]}
2023-12-14 13:16:54 INFO semantic_router.utils.logger Generated config: {'name': 'get_time', 'utterances': ['What is the current time in London?', 'Tell me the time in New York', "What's happening now in Paris?", 'time in San Francisco?', 'Tell me the time in Sydney', 'Get the current time']}
2023-12-14 13:16:54 INFO semantic_router.utils.logger Getting route layer...
Getting function name for queries:
What is the weather like in Barcelona? None {}
2023-12-14 13:16:55 INFO semantic_router.utils.logger Extracting parameters...
2023-12-14 13:16:56 INFO semantic_router.utils.logger AI message: {"location": "Taiwan"}
2023-12-14 13:16:56 INFO semantic_router.utils.logger Extracted parameters: {'location': 'Taiwan'}
What time is it in Taiwan? get_time {'location': 'Taiwan'}
Calling get_time function with location: Taiwan
2023-12-14 13:16:56 INFO semantic_router.utils.logger Extracting parameters...
2023-12-14 13:16:58 INFO semantic_router.utils.logger AI message: {"location": "the world"}
2023-12-14 13:16:58 INFO semantic_router.utils.logger Extracted parameters: {'location': 'the world'}
What is happening in the world? get_time {'location': 'the world'}
Calling get_time function with location: the world
2023-12-14 13:16:58 INFO semantic_router.utils.logger Extracting parameters...
2023-12-14 13:17:00 INFO semantic_router.utils.logger AI message: {"location": "Kaunas"}
2023-12-14 13:17:00 INFO semantic_router.utils.logger Extracted parameters: {'location': 'Kaunas'}
what is the time in Kaunas? get_time {'location': 'Kaunas'}
Calling get_time function with location: Kaunas
Im bored None {}
I want to play a game None {}
Banana None {}
%% Cell type:code id: tags:
``` python
```
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment