Commit 573519cc, authored 1 year ago by Simonas
linting
parent cd4d798b
Changes: 2 files, with 12 additions and 14 deletions

- coverage.xml (+1, −1)
- docs/examples/function_calling.ipynb (+11, −13)
coverage.xml (+1, −1)
```diff
 <?xml version="1.0" ?>
-<coverage version="7.3.2" timestamp="1702538160019" lines-valid="344" lines-covered="344" line-rate="1" branches-covered="0" branches-valid="0" branch-rate="0" complexity="0">
+<coverage version="7.3.2" timestamp="1702633916069" lines-valid="344" lines-covered="344" line-rate="1" branches-covered="0" branches-valid="0" branch-rate="0" complexity="0">
 <!-- Generated by coverage.py: https://coverage.readthedocs.io/en/7.3.2 -->
 <!-- Based on https://raw.githubusercontent.com/cobertura/web/master/htdocs/xml/coverage-04.dtd -->
 <sources>
 ...
```
docs/examples/function_calling.ipynb (+11, −13)
```diff
@@ -17,6 +17,7 @@
 "import openai\n",
 "from semantic_router.utils.logger import logger\n",
 "\n",
+"\n",
 "# Docs # https://platform.openai.com/docs/guides/function-calling\n",
 "def llm_openai(prompt: str, model: str = \"gpt-4\") -> str:\n",
 "    try:\n",
@@ -49,6 +50,7 @@
 "# Docs https://huggingface.co/docs/transformers/main_classes/text_generation\n",
 "HF_API_TOKEN = os.environ[\"HF_API_TOKEN\"]\n",
 "\n",
+"\n",
 "def llm_mistral(prompt: str) -> str:\n",
 "    api_url = \"https://z5t4cuhg21uxfmc3.us-east-1.aws.endpoints.huggingface.cloud/\"\n",
 "    headers = {\n",
@@ -71,9 +73,9 @@
 "    if response.status_code != 200:\n",
 "        raise Exception(\"Failed to call HuggingFace API\", response.text)\n",
 "\n",
-"    ai_message = response.json()[0]['generated_text']\n",
+"    ai_message = response.json()[0][\"generated_text\"]\n",
 "    if not ai_message:\n",
-"      raise Exception(\"AI message is empty\", ai_message)\n",
+"        raise Exception(\"AI message is empty\", ai_message)\n",
 "    logger.info(f\"AI message: {ai_message}\")\n",
 "    return ai_message"
 ]
@@ -94,6 +96,7 @@
 "import inspect\n",
 "from typing import Any\n",
 "\n",
+"\n",
 "def get_function_schema(function) -> dict[str, Any]:\n",
 "    schema = {\n",
 "        \"name\": function.__name__,\n",
@@ -116,6 +119,7 @@
 "\n",
 "from semantic_router.utils.logger import logger\n",
 "\n",
+"\n",
 "def generate_route(function) -> dict:\n",
 "    logger.info(\"Generating config...\")\n",
 "    example_schema = {\n",
@@ -249,6 +253,7 @@
 "from semantic_router.layer import RouteLayer\n",
 "from semantic_router.utils.logger import logger\n",
 "\n",
+"\n",
 "def create_router(routes: list[dict]) -> RouteLayer:\n",
 "    logger.info(\"Creating route layer...\")\n",
 "    encoder = CohereEncoder()\n",
@@ -279,6 +284,7 @@
 "source": [
 "from typing import Callable\n",
 "\n",
+"\n",
 "def call_function(function: Callable, parameters: dict[str, str]):\n",
 "    try:\n",
 "        return function(**parameters)\n",
@@ -410,17 +416,9 @@
 }
 ],
 "source": [
-"call(query=\"What is the time in Stockholm?\",\n",
-"     functions=tools,\n",
-"     router=router)\n",
-"call(\n",
-"    query=\"What is the tech news in the Lithuania?\",\n",
-"    functions=tools,\n",
-"    router=router)\n",
-"call(\n",
-"    query=\"Hi!\",\n",
-"    functions=tools,\n",
-"    router=router)\n"
+"call(query=\"What is the time in Stockholm?\", functions=tools, router=router)\n",
+"call(query=\"What is the tech news in the Lithuania?\", functions=tools, router=router)\n",
+"call(query=\"Hi!\", functions=tools, router=router)"
 ]
 }
],
```
%% Cell type:markdown id: tags:

## Define LLMs
%% Cell type:code id: tags:

```python
# OpenAI
import openai
from semantic_router.utils.logger import logger


# Docs # https://platform.openai.com/docs/guides/function-calling
def llm_openai(prompt: str, model: str = "gpt-4") -> str:
    try:
        logger.info(f"Calling {model} model")
        response = openai.chat.completions.create(
            model=model,
            messages=[
                {"role": "system", "content": f"{prompt}"},
            ],
        )
        ai_message = response.choices[0].message.content
        if not ai_message:
            raise Exception("AI message is empty", ai_message)
        logger.info(f"AI message: {ai_message}")
        return ai_message
    except Exception as e:
        raise Exception("Failed to call OpenAI API", e)
```
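A quick smoke test, added here for illustration only (it is not part of the commit). It assumes `OPENAI_API_KEY` is exported in the environment, which the module-level `openai` client picks up automatically:

```python
# Hypothetical smoke test for llm_openai; assumes OPENAI_API_KEY is set.
print(llm_openai("Reply with the single word: pong"))
```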
%% Cell type:code id: tags:

```python
# Mistral
import os
import requests

# Docs https://huggingface.co/docs/transformers/main_classes/text_generation
HF_API_TOKEN = os.environ["HF_API_TOKEN"]


def llm_mistral(prompt: str) -> str:
    api_url = "https://z5t4cuhg21uxfmc3.us-east-1.aws.endpoints.huggingface.cloud/"
    headers = {
        "Authorization": f"Bearer {HF_API_TOKEN}",
        "Content-Type": "application/json",
    }

    logger.info("Calling Mistral model")
    response = requests.post(
        api_url,
        headers=headers,
        json={
            "inputs": f"You are a helpful assistant, user query: {prompt}",
            "parameters": {
                "max_new_tokens": 200,
                "temperature": 0.1,
            },
        },
    )
    if response.status_code != 200:
        raise Exception("Failed to call HuggingFace API", response.text)

    ai_message = response.json()[0]["generated_text"]
    if not ai_message:
        raise Exception("AI message is empty", ai_message)
    logger.info(f"AI message: {ai_message}")
    return ai_message
```
%% Cell type:markdown id: tags:

### Now we need to generate a config from the function schema using an LLM
%% Cell type:code id: tags:

```python
import inspect
from typing import Any


def get_function_schema(function) -> dict[str, Any]:
    schema = {
        "name": function.__name__,
        "description": str(inspect.getdoc(function)),
        "signature": str(inspect.signature(function)),
        "output": str(
            inspect.signature(function).return_annotation,
        ),
    }
    return schema
```
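For orientation (this example is not part of the commit), here is what `get_function_schema` yields for the `get_time` function defined later in this notebook; the dict follows directly from `inspect`:

```python
def get_time(location: str) -> str:
    """Useful to get the time in a specific location"""
    return "get_time"


# get_function_schema(get_time) evaluates to:
# {
#     "name": "get_time",
#     "description": "Useful to get the time in a specific location",
#     "signature": "(location: str) -> str",
#     "output": "<class 'str'>",
# }
print(get_function_schema(get_time))
```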
%% Cell type:code id: tags:

```python
import json

from semantic_router.utils.logger import logger


def generate_route(function) -> dict:
    logger.info("Generating config...")
    example_schema = {
        "name": "get_weather",
        "description": "Useful to get the weather in a specific location",
        "signature": "(location: str) -> str",
        "output": "<class 'str'>",
    }
    example_config = {
        "name": "get_weather",
        "utterances": [
            "What is the weather like in SF?",
            "What is the weather in Cyprus?",
            "weather in London?",
            "Tell me the weather in New York",
            "what is the current weather in Paris?",
        ],
    }
    function_schema = get_function_schema(function)
    prompt = f"""
    You are a helpful assistant designed to output JSON.
    Given the following function schema
    {function_schema}
    generate a routing config with the format:
    {example_config}
    For example:
    Input: {example_schema}
    Output: {example_config}
    Input: {function_schema}
    Output:
    """

    ai_message = llm_openai(prompt)
    ai_message = ai_message.replace("CONFIG:", "").replace("'", '"').strip().rstrip(",")

    try:
        route_config = json.loads(ai_message)
        logger.info(f"Generated config: {route_config}")
        return route_config
    except json.JSONDecodeError as json_error:
        logger.error(f"JSON parsing error {json_error}")
        print(f"AI message: {ai_message}")
        return {"error": "Failed to generate config"}
```
%% Cell type:markdown id: tags:

Extract function parameters using the `Mistral` open-source model
%% Cell type:code id: tags:

```python
def extract_parameters(query: str, function) -> dict:
    logger.info("Extracting parameters...")
    example_query = "How is the weather in Hawaii right now in International units?"
    example_schema = {
        "name": "get_weather",
        "description": "Useful to get the weather in a specific location",
        "signature": "(location: str, degree: str) -> str",
        "output": "<class 'str'>",
    }
    example_parameters = {
        "location": "London",
        "degree": "Celsius",
    }
    prompt = f"""
    You are a helpful assistant designed to output JSON.
    Given the following function schema
    {get_function_schema(function)}
    and query
    {query}
    extract the parameters values from the query, in a valid JSON format.
    Example:
    Input:
    query: {example_query}
    schema: {example_schema}
    Output:
    parameters: {example_parameters}
    Input:
    query: {query}
    schema: {get_function_schema(function)}
    Output:
    parameters:
    """

    ai_message = llm_mistral(prompt)
    ai_message = ai_message.replace("CONFIG:", "").replace("'", '"').strip().rstrip(",")

    try:
        parameters = json.loads(ai_message)
        logger.info(f"Extracted parameters: {parameters}")
        return parameters
    except json.JSONDecodeError as json_error:
        logger.error(f"JSON parsing error {json_error}")
        return {"error": "Failed to extract parameters"}
```
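As a hedged illustration (not in the original notebook): with the `get_news` function from the workflow section below, and given a live endpoint plus `HF_API_TOKEN`, extraction should produce the dict logged in the output cell:

```python
# Illustrative sketch; requires get_news (defined in the workflow section
# below) and a reachable Mistral endpoint. Expected, per the output cell:
# {'category': 'tech', 'country': 'Lithuania'}
extract_parameters("What is the tech news in the Lithuania?", get_news)
```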
%% Cell type:markdown id: tags:

Set up the routing layer
%% Cell type:code id: tags:

```python
from semantic_router.schema import Route
from semantic_router.encoders import CohereEncoder
from semantic_router.layer import RouteLayer
from semantic_router.utils.logger import logger


def create_router(routes: list[dict]) -> RouteLayer:
    logger.info("Creating route layer...")
    encoder = CohereEncoder()

    route_list: list[Route] = []
    for route in routes:
        if "name" in route and "utterances" in route:
            print(f"Route: {route}")
            route_list.append(Route(name=route["name"], utterances=route["utterances"]))
        else:
            logger.warning(f"Misconfigured route: {route}")

    return RouteLayer(encoder=encoder, routes=route_list)
```
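A minimal sketch (again, not part of the commit) of driving `create_router` with a hand-written config instead of an LLM-generated one; `CohereEncoder` typically expects a Cohere API key (e.g. `COHERE_API_KEY`) to be available in the environment:

```python
# Hypothetical hand-written route configs, mirroring the shape that
# generate_route produces from an LLM.
manual_routes = [
    {
        "name": "get_time",
        "utterances": [
            "What time is it in Tokyo?",
            "current time in Berlin?",
        ],
    },
    {"name": "broken"},  # missing "utterances" -> logged as misconfigured
]
router = create_router(manual_routes)
```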
%% Cell type:markdown id: tags:

Set up calling functions
%% Cell type:code id: tags:

```python
from typing import Callable


def call_function(function: Callable, parameters: dict[str, str]):
    try:
        return function(**parameters)
    except TypeError as e:
        logger.error(f"Error calling function: {e}")


def call_llm(query: str):
    return llm_mistral(query)


def call(query: str, functions: list[Callable], router: RouteLayer):
    function_name = router(query)
    if not function_name:
        logger.warning("No function found")
        return call_llm(query)

    for function in functions:
        if function.__name__ == function_name:
            parameters = extract_parameters(query, function)
            print(f"parameters: {parameters}")
            return call_function(function, parameters)
```
%% Cell type:markdown id: tags:

### Workflow
%% Cell type:code id: tags:

```python
def get_time(location: str) -> str:
    """Useful to get the time in a specific location"""
    print(f"Calling `get_time` function with location: {location}")
    return "get_time"


def get_news(category: str, country: str) -> str:
    """Useful to get the news in a specific country"""
    print(
        f"Calling `get_news` function with category: {category} and country: {country}"
    )
    return "get_news"


# Registering functions to the router
route_get_time = generate_route(get_time)
route_get_news = generate_route(get_news)

routes = [route_get_time, route_get_news]
router = create_router(routes)

# Tools
tools = [get_time, get_news]
```
%% Cell type:code id: tags:

```python
call(query="What is the time in Stockholm?", functions=tools, router=router)
call(query="What is the tech news in the Lithuania?", functions=tools, router=router)
call(query="Hi!", functions=tools, router=router)
```
%% Output

2023-12-15 11:41:54 INFO semantic_router.utils.logger Extracting parameters...
2023-12-15 11:41:54 INFO semantic_router.utils.logger Calling Mistral model
2023-12-15 11:41:55 INFO semantic_router.utils.logger AI message:
{
'location': 'Stockholm'
}
2023-12-15 11:41:55 INFO semantic_router.utils.logger Extracted parameters: {'location': 'Stockholm'}
parameters: {'location': 'Stockholm'}
Calling `get_time` function with location: Stockholm
2023-12-15 11:41:55 INFO semantic_router.utils.logger Extracting parameters...
2023-12-15 11:41:55 INFO semantic_router.utils.logger Calling Mistral model
2023-12-15 11:41:56 INFO semantic_router.utils.logger AI message:
{
'category': 'tech',
'country': 'Lithuania'
}
2023-12-15 11:41:56 INFO semantic_router.utils.logger Extracted parameters: {'category': 'tech', 'country': 'Lithuania'}
parameters: {'category': 'tech', 'country': 'Lithuania'}
Calling `get_news` function with category: tech and country: Lithuania
2023-12-15 11:41:57 WARNING semantic_router.utils.logger No function found
2023-12-15 11:41:57 INFO semantic_router.utils.logger Calling Mistral model
2023-12-15 11:41:57 INFO semantic_router.utils.logger AI message: How can I help you today?
' How can I help you today?'
...