From 7e6fb68789392da0c3430b1afdf50c789cf27ea0 Mon Sep 17 00:00:00 2001 From: James Briggs <james.briggs@hotmail.com> Date: Mon, 8 Jul 2024 16:41:00 +0800 Subject: [PATCH] fix: bad doc format --- docs/00-introduction.ipynb | 658 ++++++++++++++++++++----------------- 1 file changed, 357 insertions(+), 301 deletions(-) diff --git a/docs/00-introduction.ipynb b/docs/00-introduction.ipynb index b4403ad6..260f1e99 100644 --- a/docs/00-introduction.ipynb +++ b/docs/00-introduction.ipynb @@ -1,316 +1,372 @@ { - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "[](https://colab.research.google.com/github/aurelio-labs/semantic-router/blob/main/docs/00-introduction.ipynb) [](https://nbviewer.org/github/aurelio-labs/semantic-router/blob/main/docs/00-introduction.ipynb)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Semantic Router Intro" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The Semantic Router library can be used as a super fast route making layer on top of LLMs. That means rather than waiting on a slow agent to decide what to do, we can use the magic of semantic vector space to make routes. Cutting route making time down from seconds to milliseconds." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Getting Started" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We start by installing the library:" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "!pip install -qU semantic-router" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We start by defining a dictionary mapping routes to example phrases that should trigger those routes." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ + "cells": [ { - "name": "stderr", - "output_type": "stream", - "text": [ - "c:\\Users\\Siraj\\Documents\\Personal\\Work\\Aurelio\\Virtual Environments\\semantic_router_3\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], - "source": [ - "from semantic_router import Route\n", - "\n", - "politics = Route(\n", - " name=\"politics\",\n", - " utterances=[\n", - " \"isn't politics the best thing ever\",\n", - " \"why don't you tell me about your political opinions\",\n", - " \"don't you just love the president\",\n", - " \"don't you just hate the president\",\n", - " \"they're going to destroy this country!\",\n", - " \"they will save the country!\",\n", - " ],\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's define another for good measure:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "chitchat = Route(\n", - " name=\"chitchat\",\n", - " utterances=[\n", - " \"how's the weather today?\",\n", - " \"how are things going?\",\n", - " \"lovely weather today\",\n", - " \"the weather is horrendous\",\n", - " \"let's go to the chippy\",\n", - " ],\n", - ")\n", - "\n", - "routes = [politics, chitchat]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we initialize our embedding model:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "from getpass import getpass\n", - "from semantic_router.encoders import CohereEncoder, OpenAIEncoder\n", - "\n", - "# os.environ[\"COHERE_API_KEY\"] = os.getenv(\"COHERE_API_KEY\") or getpass(\n", - "# \"Enter Cohere API Key: \"\n", - "# )\n", - "os.environ[\"OPENAI_API_KEY\"] = os.getenv(\"OPENAI_API_KEY\") or getpass(\n", - " \"Enter OpenAI API Key: \"\n", - ")\n", - "\n", - "# encoder = CohereEncoder()\n", - "encoder = OpenAIEncoder()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we define the `RouteLayer`. When called, the route layer will consume text (a query) and output the category (`Route`) it belongs to — to initialize a `RouteLayer` we need our `encoder` model and a list of `routes`." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ + "cell_type": "markdown", + "metadata": { + "id": "K7NsuSPNf3px" + }, + "source": [ + "[](https://colab.research.google.com/github/aurelio-labs/semantic-router/blob/main/docs/00-introduction.ipynb) [](https://nbviewer.org/github/aurelio-labs/semantic-router/blob/main/docs/00-introduction.ipynb)" + ] + }, { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[32m2024-05-07 15:02:46 INFO semantic_router.utils.logger local\u001b[0m\n" - ] - } - ], - "source": [ - "from semantic_router.layer import RouteLayer\n", - "\n", - "rl = RouteLayer(encoder=encoder, routes=routes)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we can test it:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ + "cell_type": "markdown", + "metadata": { + "id": "Am2hmLzTf3py" + }, + "source": [ + "# Semantic Router Intro" + ] + }, { - "data": { - "text/plain": [ - "RouteChoice(name='politics', function_call=None, similarity_score=None)" + "cell_type": "markdown", + "metadata": { + "id": "k1nRRAbYf3py" + }, + "source": [ + "The Semantic Router library can be used as a super fast route making layer on top of LLMs. That means rather than waiting on a slow agent to decide what to do, we can use the magic of semantic vector space to make routes. 
Cutting route making time down from seconds to milliseconds." ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "rl(\"don't you love politics?\")" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ + }, { - "data": { - "text/plain": [ - "RouteChoice(name='chitchat', function_call=None, similarity_score=None)" + "cell_type": "markdown", + "metadata": { + "id": "NggrMQP2f3py" + }, + "source": [ + "## Getting Started" ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "rl(\"how's the weather today?\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Both are classified accurately, what if we send a query that is unrelated to our existing `Route` objects?" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ + }, { - "data": { - "text/plain": [ - "RouteChoice(name=None, function_call=None, similarity_score=None)" + "cell_type": "markdown", + "metadata": { + "id": "9zP-l_T7f3py" + }, + "source": [ + "We start by installing the library:" ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "rl(\"I'm interested in learning about llama 2\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can also retrieve multiple routes with its associated score:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ + }, { - "data": { - "text/plain": [ - "[RouteChoice(name='politics', function_call=None, similarity_score=0.8595844842560181),\n", - " RouteChoice(name='chitchat', function_call=None, similarity_score=0.8356704527362284)]" + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "4YI81tu0f3pz" + }, + "outputs": [], + "source": [ + "!pip install -qU semantic-router" ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "rl.retrieve_multiple_routes(\"Hi! How are you doing in politics??\")" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ + }, + { + "cell_type": "markdown", + "metadata": { + "id": "HfB8252ff3pz" + }, + "source": [ + "We start by defining a dictionary mapping routes to example phrases that should trigger those routes." + ] + }, { - "data": { - "text/plain": [ - "[]" + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "lslfqYOEf3pz", + "outputId": "c13e3e77-310c-4b86-e291-4b6005d698bd" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\Siraj\\Documents\\Personal\\Work\\Aurelio\\Virtual Environments\\semantic_router_3\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], + "source": [ + "from semantic_router import Route\n", + "\n", + "politics = Route(\n", + " name=\"politics\",\n", + " utterances=[\n", + " \"isn't politics the best thing ever\",\n", + " \"why don't you tell me about your political opinions\",\n", + " \"don't you just love the president\",\n", + " \"don't you just hate the president\",\n", + " \"they're going to destroy this country!\",\n", + " \"they will save the country!\",\n", + " ],\n", + ")" ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" + }, + { + "cell_type": "markdown", + "metadata": { + "id": "WYLHUDa1f3p0" + }, + "source": [ + "Let's define another for good measure:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "LAdY1jdxf3p0" + }, + "outputs": [], + "source": [ + "chitchat = Route(\n", + " name=\"chitchat\",\n", + " utterances=[\n", + " \"how's the weather today?\",\n", + " \"how are things going?\",\n", + " \"lovely weather today\",\n", + " \"the weather is horrendous\",\n", + " \"let's go to the chippy\",\n", + " ],\n", + ")\n", + "\n", + "routes = [politics, chitchat]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ReN59ieGf3p0" + }, + "source": [ + "Now we initialize our embedding model:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "MF47W_Sof3p2" + }, + "outputs": [], + "source": [ + "import os\n", + "from getpass import getpass\n", + "from semantic_router.encoders import CohereEncoder, OpenAIEncoder\n", + "\n", + "# os.environ[\"COHERE_API_KEY\"] = os.getenv(\"COHERE_API_KEY\") or getpass(\n", + "# \"Enter Cohere API Key: \"\n", + "# )\n", + "os.environ[\"OPENAI_API_KEY\"] = os.getenv(\"OPENAI_API_KEY\") or getpass(\n", + " \"Enter OpenAI API Key: \"\n", + ")\n", + "\n", + "# encoder = CohereEncoder()\n", + "encoder = OpenAIEncoder()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "lYuLO0l9f3p3" + }, + "source": [ + "Now we define the `RouteLayer`. When called, the route layer will consume text (a query) and output the category (`Route`) it belongs to — to initialize a `RouteLayer` we need our `encoder` model and a list of `routes`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "dh1U8IDOf3p3", + "outputId": "872810da-956a-47af-a91f-217ce351a88b" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[32m2024-05-07 15:02:46 INFO semantic_router.utils.logger local\u001b[0m\n" + ] + } + ], + "source": [ + "from semantic_router.layer import RouteLayer\n", + "\n", + "rl = RouteLayer(encoder=encoder, routes=routes)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Xj32uEF-f3p3" + }, + "source": [ + "Now we can test it:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "fIXOjRp9f3p3", + "outputId": "8b9b5746-ae7c-43bb-d84f-5fa7c30e423e" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "RouteChoice(name='politics', function_call=None, similarity_score=None)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rl(\"don't you love politics?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "0UN2mKvjf3p4", + "outputId": "062f9499-7db3-49d2-81ef-e7d5dc9a88f6" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "RouteChoice(name='chitchat', function_call=None, similarity_score=None)" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rl(\"how's the weather today?\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NHZWZKoTf3p4" + }, + "source": [ + "Both are classified accurately, what if we send a query that is unrelated to our existing `Route` objects?" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "0WnvGJByf3p4", + "outputId": "4496e9b2-7cd8-4466-fe1a-3e6f5cf30b0d" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "RouteChoice(name=None, function_call=None, similarity_score=None)" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rl(\"I'm interested in learning about llama 2\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "dDZF2eN4f3p4" + }, + "source": [ + "We can also retrieve multiple routes with its associated score:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "n27I7kmpf3p4", + "outputId": "2138e077-190b-41b7-a3eb-4fd76e2f59c2" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[RouteChoice(name='politics', function_call=None, similarity_score=0.8595844842560181),\n", + " RouteChoice(name='chitchat', function_call=None, similarity_score=0.8356704527362284)]" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rl.retrieve_multiple_routes(\"Hi! 
How are you doing in politics??\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "zi4XJ7Amf3p4", + "outputId": "cf05cd65-d4f4-454a-ef05-77f16f37cc8f" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rl.retrieve_multiple_routes(\"I'm interested in learning about llama 2\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "QF_wATjYf3p4" + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "decision-layer", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + }, + "colab": { + "provenance": [] } - ], - "source": [ - "rl.retrieve_multiple_routes(\"I'm interested in learning about llama 2\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "decision-layer", - "language": "python", - "name": "python3" }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file -- GitLab
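
For readers skimming this patch rather than opening the notebook, the cells above reduce to the short workflow below. This is only a condensed sketch assembled from the notebook source in the diff, not part of the change itself: it assumes semantic-router is installed, that an OPENAI_API_KEY is available in the environment (or entered at the getpass prompt), and that the RouteLayer API matches the library version the notebook was written against; the utterance lists are truncated for brevity.

    # Condensed sketch of the notebook reformatted by this patch.
    # Assumes: `pip install -qU semantic-router` and an OpenAI API key.
    import os
    from getpass import getpass

    from semantic_router import Route
    from semantic_router.encoders import OpenAIEncoder
    from semantic_router.layer import RouteLayer

    os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY") or getpass(
        "Enter OpenAI API Key: "
    )

    # Each Route pairs a name with example utterances that should trigger it.
    politics = Route(
        name="politics",
        utterances=[
            "don't you just love the president",
            "they're going to destroy this country!",
        ],
    )
    chitchat = Route(
        name="chitchat",
        utterances=[
            "how's the weather today?",
            "let's go to the chippy",
        ],
    )

    # The route layer embeds a query and returns the best-matching Route.
    rl = RouteLayer(encoder=OpenAIEncoder(), routes=[politics, chitchat])

    print(rl("don't you love politics?"))                 # RouteChoice(name='politics', ...)
    print(rl("I'm interested in learning about llama 2")) # RouteChoice(name=None, ...) – no match
    print(rl.retrieve_multiple_routes("Hi! How are you doing in politics??"))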