Unverified commit d9a3f348, authored by Simonas, committed by GitHub

Merge pull request #28 from aurelio-labs/luca/change-python-version

Change python version + fix on cohere encoder
Parents: 0082dd0f af9cccff
poetry.lock:

-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.

 [[package]]
 name = "aiohttp"
@@ -87,6 +87,7 @@ files = [
 [package.dependencies]
 aiosignal = ">=1.1.2"
+async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
 attrs = ">=17.3.0"
 frozenlist = ">=1.1.1"
 multidict = ">=4.5,<7.0"
@@ -121,6 +122,7 @@ files = [
 ]

 [package.dependencies]
+exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
 idna = ">=2.8"
 sniffio = ">=1.1"
@@ -158,6 +160,17 @@ six = ">=1.12.0"
 astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"]
 test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]

+[[package]]
+name = "async-timeout"
+version = "4.0.3"
+description = "Timeout context manager for asyncio programs"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+    {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
+]
+
 [[package]]
 name = "attrs"
 version = "23.1.0"
@@ -226,6 +239,8 @@ packaging = ">=22.0"
 pathspec = ">=0.9.0"
 platformdirs = ">=2"
 tokenize-rt = {version = ">=3.2.0", optional = true, markers = "extra == \"jupyter\""}
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}

 [package.extras]
 colorama = ["colorama (>=0.4.3)"]
@@ -546,6 +561,9 @@ files = [
 {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"},
 ]

+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
+
 [package.extras]
 toml = ["tomli"]
@@ -598,6 +616,20 @@ files = [
 {file = "distro-1.8.0.tar.gz", hash = "sha256:02e111d1dc6a50abb8eed6bf31c3e48ed8b0830d1ea2a1b78c61765c2513fdd8"},
 ]

+[[package]]
+name = "exceptiongroup"
+version = "1.2.0"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
+    {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
 [[package]]
 name = "execnet"
 version = "2.0.2"
@@ -886,6 +918,7 @@ files = [
 [package.dependencies]
 colorama = {version = "*", markers = "sys_platform == \"win32\""}
 decorator = "*"
+exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
 jedi = ">=0.16"
 matplotlib-inline = "*"
 pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""}
@@ -1158,6 +1191,7 @@ files = [
 [package.dependencies]
 mypy-extensions = ">=1.0.0"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
 typing-extensions = ">=4.1.0"

 [package.extras]
@@ -1532,9 +1566,11 @@ files = [
 [package.dependencies]
 colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
 iniconfig = "*"
 packaging = "*"
 pluggy = ">=0.12,<2.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}

 [package.extras]
 testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
@@ -1932,6 +1968,17 @@ files = [
 {file = "tokenize_rt-5.2.0.tar.gz", hash = "sha256:9fe80f8a5c1edad2d3ede0f37481cc0cc1538a2f442c9c2f9e4feacd2792d054"},
 ]

+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+    {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
 [[package]]
 name = "tornado"
 version = "6.4"
@@ -2155,5 +2202,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
 [metadata]
 lock-version = "2.0"
-python-versions = "^3.11"
-content-hash = "c231cdda3896d7ecd509a30bcaa552e23fcf9899952eba2f0f6d286256fd14b3"
+python-versions = "^3.10"
+content-hash = "3300c77d6b6fab3faca403e2f3064a23e9f5ddcd34d63cad42d39b94b1ae5c2b"
pyproject.toml:

@@ -12,7 +12,7 @@ authors = [
 readme = "README.md"

 [tool.poetry.dependencies]
-python = "^3.11"
+python = "^3.10"
 pydantic = "^1.8.2"
 openai = "^1.3.9"
 cohere = "^4.32"
......
semantic_router/encoders (CohereEncoder):

@@ -6,7 +6,7 @@ from semantic_router.encoders import BaseEncoder
 class CohereEncoder(BaseEncoder):
-    client: cohere.Client | None
+    client: cohere.Client | None = None

     def __init__(
         self,
......
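For context, the encoder fix above amounts to giving the optional Cohere client an explicit default value. A minimal sketch of the resulting field declaration, assuming the pydantic-style `BaseEncoder` base class imported in the hunk header above:

``` python
import cohere

from semantic_router.encoders import BaseEncoder


class CohereEncoder(BaseEncoder):
    # An explicit default of None means the client does not have to be passed
    # at construction time; it can be created later (e.g. inside __init__).
    client: cohere.Client | None = None
```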
%% Cell type:markdown id: tags:

# Semantic Router Walkthrough

%% Cell type:markdown id: tags:

The Semantic Router library can be used as a super fast route-making layer on top of LLMs. Rather than waiting on a slow agent to decide what to do, we can use the magic of semantic vector space to make routes, cutting route-making time down from seconds to milliseconds.

%% Cell type:markdown id: tags:

## Getting Started

%% Cell type:markdown id: tags:

We start by installing the library:

%% Cell type:code id: tags:

``` python
!pip install -qU semantic-router==0.0.8
```
%% Cell type:markdown id: tags:

Next, we define a set of `Route` objects, each mapping a route name to example utterances that should trigger that route.

%% Cell type:code id: tags:

``` python
from semantic_router.schema import Route

politics = Route(
    name="politics",
    utterances=[
        "isn't politics the best thing ever",
        "why don't you tell me about your political opinions",
        "don't you just love the president",
        "don't you just hate the president",
        "they're going to destroy this country!",
        "they will save the country!",
    ],
)
```
%% Cell type:markdown id: tags:

Let's define another for good measure:

%% Cell type:code id: tags:

``` python
chitchat = Route(
    name="chitchat",
    utterances=[
        "how's the weather today?",
        "how are things going?",
        "lovely weather today",
        "the weather is horrendous",
        "let's go to the chippy",
    ],
)

routes = [politics, chitchat]
```
%% Cell type:markdown id: tags:

Now we initialize our embedding model:

%% Cell type:code id: tags:

``` python
import os
from getpass import getpass

from semantic_router.encoders import CohereEncoder

os.environ["COHERE_API_KEY"] = os.getenv("COHERE_API_KEY") or getpass(
    "Enter Cohere API Key: "
)

encoder = CohereEncoder()
```
%% Cell type:markdown id: tags:

Now we define the `RouteLayer`. When called, the route layer will consume text (a query) and output the category (`Route`) it belongs to. To initialize a `RouteLayer` we need our `encoder` model and a list of `routes`.

%% Cell type:code id: tags:

``` python
from semantic_router.layer import RouteLayer

dl = RouteLayer(encoder=encoder, routes=routes)
```
%% Cell type:markdown id: tags:

Now we can test it:

%% Cell type:code id: tags:

``` python
dl("don't you love politics?")
```

%% Cell type:code id: tags:

``` python
dl("how's the weather today?")
```
%% Cell type:markdown id: tags:

Both are classified accurately. What if we send a query that is unrelated to our existing `Route` objects?

%% Cell type:code id: tags:

``` python
dl("I'm interested in learning about llama 2")
```

%% Cell type:markdown id: tags:

In this case, we return `None` because no matches were identified.
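Building on the `dl` layer defined above, the sketch below (not part of the original walkthrough) shows one way to turn that `None` result into fallback behaviour. It assumes only that the layer returns a falsy value when no route matches, as described above; the helper name `answer` is purely illustrative.

``` python
# Hypothetical helper for illustration: answer via a matched route when one
# exists, otherwise hand the query off to a catch-all default.
def answer(query: str) -> str:
    route = dl(query)  # matched route, or None when nothing matches
    if route is None:
        # No semantic match, so fall back to a general-purpose handler.
        return f"No route matched; sending {query!r} to the default handler"
    return f"Matched route: {route}"


answer("I'm interested in learning about llama 2")
```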
......