From a836578095e304e6a8896949e809ff3658c55290 Mon Sep 17 00:00:00 2001
From: Simonas <20096648+simjak@users.noreply.github.com>
Date: Mon, 18 Dec 2023 15:40:56 +0200
Subject: [PATCH] embed all utterances at once

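Batch-embed all route utterances in a single encoder call when a
HybridRouteLayer is initialized with routes, instead of encoding each
route separately in a tqdm loop. The new _add_routes method flattens
the utterances across routes, embeds them once with the dense and
sparse encoders, and builds the category, dense, and sparse indexes in
one pass. Also removes the redundant logging.basicConfig call from the
logger setup and refreshes the example notebooks.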
---
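Reviewer note (below the fold, ignored when the patch is applied): a
minimal, self-contained sketch of the batching idea behind the new
_add_routes method. FakeRoute and FakeEncoder are stand-ins for
semantic-router's Route and encoder classes, not real APIs; only the
flattening and label alignment mirror the code added in
semantic_router/hybrid_layer.py.

    import numpy as np

    class FakeRoute:
        def __init__(self, name, utterances):
            self.name = name
            self.utterances = utterances

    class FakeEncoder:
        def __call__(self, docs):
            # toy 3-dim embedding per document, derived from its length
            return [[float(len(d)), 1.0, 0.0] for d in docs]

    routes = [
        FakeRoute("get_time", ["what time is it?", "time in Tokyo?"]),
        FakeRoute("get_news", ["latest news in the US"]),
    ]
    encoder = FakeEncoder()

    # before: one encoder call per route (the removed tqdm loop)
    per_route = [np.array(encoder(r.utterances)) for r in routes]

    # after: flatten every utterance and embed in a single call
    all_utterances = [u for r in routes for u in r.utterances]
    dense_embeds = np.array(encoder(all_utterances))

    # one route label per utterance, aligned with the embedding rows
    route_names = np.array([r.name for r in routes for _ in r.utterances])

    assert dense_embeds.shape == (3, 3)
    assert list(route_names) == ["get_time", "get_time", "get_news"]
    # batching produces the same rows as the per-route calls, in order
    assert np.array_equal(np.concatenate(per_route), dense_embeds)
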
 coverage.xml                         |  93 +++++++++--------
 docs/examples/function_calling.ipynb | 143 ++++++++++++++-------------
 docs/examples/hybrid-layer.ipynb     |  27 +++--
 semantic_router/hybrid_layer.py      |  38 ++++++-
 semantic_router/utils/logger.py      |   9 --
 tests/unit/test_hybrid_layer.py      |   2 +-
 6 files changed, 173 insertions(+), 139 deletions(-)

diff --git a/coverage.xml b/coverage.xml
index 9af9ebee..899b23e4 100644
--- a/coverage.xml
+++ b/coverage.xml
@@ -1,5 +1,5 @@
 <?xml version="1.0" ?>
-<coverage version="7.3.3" timestamp="1702894511196" lines-valid="345" lines-covered="345" line-rate="1" branches-covered="0" branches-valid="0" branch-rate="0" complexity="0">
+<coverage version="7.3.3" timestamp="1702906788381" lines-valid="352" lines-covered="352" line-rate="1" branches-covered="0" branches-valid="0" branch-rate="0" complexity="0">
 	<!-- Generated by coverage.py: https://coverage.readthedocs.io/en/7.3.3 -->
 	<!-- Based on https://raw.githubusercontent.com/cobertura/web/master/htdocs/xml/coverage-04.dtd -->
 	<sources>
@@ -21,26 +21,24 @@
 					<lines>
 						<line number="1" hits="1"/>
 						<line number="2" hits="1"/>
-						<line number="3" hits="1"/>
-						<line number="5" hits="1"/>
+						<line number="4" hits="1"/>
+						<line number="10" hits="1"/>
 						<line number="11" hits="1"/>
-						<line number="12" hits="1"/>
+						<line number="14" hits="1"/>
 						<line number="15" hits="1"/>
 						<line number="16" hits="1"/>
 						<line number="17" hits="1"/>
 						<line number="18" hits="1"/>
-						<line number="19" hits="1"/>
-						<line number="21" hits="1"/>
+						<line number="20" hits="1"/>
+						<line number="23" hits="1"/>
 						<line number="24" hits="1"/>
 						<line number="25" hits="1"/>
-						<line number="26" hits="1"/>
+						<line number="27" hits="1"/>
 						<line number="28" hits="1"/>
 						<line number="29" hits="1"/>
 						<line number="30" hits="1"/>
-						<line number="31" hits="1"/>
-						<line number="33" hits="1"/>
-						<line number="35" hits="1"/>
-						<line number="37" hits="1"/>
+						<line number="32" hits="1"/>
+						<line number="34" hits="1"/>
 						<line number="38" hits="1"/>
 						<line number="40" hits="1"/>
 						<line number="41" hits="1"/>
@@ -67,48 +65,58 @@
 						<line number="76" hits="1"/>
 						<line number="78" hits="1"/>
 						<line number="80" hits="1"/>
-						<line number="85" hits="1"/>
+						<line number="82" hits="1"/>
+						<line number="83" hits="1"/>
 						<line number="86" hits="1"/>
-						<line number="88" hits="1"/>
-						<line number="89" hits="1"/>
+						<line number="87" hits="1"/>
+						<line number="90" hits="1"/>
 						<line number="91" hits="1"/>
-						<line number="93" hits="1"/>
-						<line number="95" hits="1"/>
-						<line number="96" hits="1"/>
-						<line number="97" hits="1"/>
+						<line number="92" hits="1"/>
 						<line number="99" hits="1"/>
-						<line number="100" hits="1"/>
-						<line number="101" hits="1"/>
-						<line number="102" hits="1"/>
-						<line number="104" hits="1"/>
-						<line number="105" hits="1"/>
 						<line number="106" hits="1"/>
-						<line number="108" hits="1"/>
-						<line number="109" hits="1"/>
-						<line number="111" hits="1"/>
 						<line number="112" hits="1"/>
-						<line number="114" hits="1"/>
-						<line number="116" hits="1"/>
 						<line number="117" hits="1"/>
 						<line number="118" hits="1"/>
 						<line number="120" hits="1"/>
 						<line number="121" hits="1"/>
-						<line number="122" hits="1"/>
 						<line number="123" hits="1"/>
-						<line number="124" hits="1"/>
 						<line number="125" hits="1"/>
-						<line number="126" hits="1"/>
+						<line number="127" hits="1"/>
 						<line number="128" hits="1"/>
+						<line number="129" hits="1"/>
 						<line number="131" hits="1"/>
 						<line number="132" hits="1"/>
-						<line number="135" hits="1"/>
+						<line number="133" hits="1"/>
+						<line number="134" hits="1"/>
 						<line number="136" hits="1"/>
+						<line number="137" hits="1"/>
 						<line number="138" hits="1"/>
-						<line number="139" hits="1"/>
+						<line number="140" hits="1"/>
 						<line number="141" hits="1"/>
-						<line number="142" hits="1"/>
 						<line number="143" hits="1"/>
-						<line number="145" hits="1"/>
+						<line number="144" hits="1"/>
+						<line number="146" hits="1"/>
+						<line number="148" hits="1"/>
+						<line number="149" hits="1"/>
+						<line number="150" hits="1"/>
+						<line number="152" hits="1"/>
+						<line number="153" hits="1"/>
+						<line number="154" hits="1"/>
+						<line number="155" hits="1"/>
+						<line number="156" hits="1"/>
+						<line number="157" hits="1"/>
+						<line number="158" hits="1"/>
+						<line number="160" hits="1"/>
+						<line number="163" hits="1"/>
+						<line number="164" hits="1"/>
+						<line number="167" hits="1"/>
+						<line number="168" hits="1"/>
+						<line number="170" hits="1"/>
+						<line number="171" hits="1"/>
+						<line number="173" hits="1"/>
+						<line number="174" hits="1"/>
+						<line number="175" hits="1"/>
+						<line number="177" hits="1"/>
 					</lines>
 				</class>
 				<class name="layer.py" filename="layer.py" complexity="0" line-rate="1" branch-rate="0">
@@ -405,19 +413,18 @@
 						<line number="8" hits="1"/>
 						<line number="23" hits="1"/>
 						<line number="24" hits="1"/>
+						<line number="25" hits="1"/>
 						<line number="26" hits="1"/>
 						<line number="27" hits="1"/>
-						<line number="29" hits="1"/>
+						<line number="28" hits="1"/>
+						<line number="31" hits="1"/>
+						<line number="32" hits="1"/>
+						<line number="33" hits="1"/>
 						<line number="35" hits="1"/>
 						<line number="37" hits="1"/>
+						<line number="38" hits="1"/>
 						<line number="40" hits="1"/>
-						<line number="41" hits="1"/>
-						<line number="42" hits="1"/>
-						<line number="44" hits="1"/>
-						<line number="46" hits="1"/>
-						<line number="47" hits="1"/>
-						<line number="49" hits="1"/>
-						<line number="52" hits="1"/>
+						<line number="43" hits="1"/>
 					</lines>
 				</class>
 			</classes>
diff --git a/docs/examples/function_calling.ipynb b/docs/examples/function_calling.ipynb
index 5d3be2fb..466b8f0a 100644
--- a/docs/examples/function_calling.ipynb
+++ b/docs/examples/function_calling.ipynb
@@ -9,9 +9,18 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 12,
+   "execution_count": 1,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/Users/jakit/customers/aurelio/semantic-router/.venv/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+      "  from .autonotebook import tqdm as notebook_tqdm\n"
+     ]
+    }
+   ],
    "source": [
     "# OpenAI\n",
     "import openai\n",
@@ -39,7 +48,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": 2,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -91,7 +100,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 14,
+   "execution_count": 3,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -113,7 +122,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 4,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -130,7 +139,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 16,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -212,7 +221,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 17,
+   "execution_count": 6,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -228,7 +237,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 18,
+   "execution_count": 7,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -308,7 +317,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 19,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -342,7 +351,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 24,
+   "execution_count": 9,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -389,52 +398,52 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": 10,
    "metadata": {},
    "outputs": [
     {
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "\u001b[32m2023-12-18 12:17:58 INFO semantic_router.utils.logger Generating config...\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:17:58 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
-      "\u001b[31m2023-12-18 12:18:00 ERROR semantic_router.utils.logger Fall back to OpenAI failed with error ('Failed to call HuggingFace API', '{\"error\":\"Bad Gateway\"}')\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:18:00 INFO semantic_router.utils.logger Calling gpt-4 model\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:18:05 INFO semantic_router.utils.logger AI message: {\n",
-      "    \"name\": \"get_time\",\n",
-      "    \"utterances\": [\n",
-      "        \"what is the time in new york\",\n",
-      "        \"can you tell me the time in london\",\n",
-      "        \"get me the current time in tokyo\",\n",
-      "        \"i need to know the time in sydney\",\n",
-      "        \"please tell me the current time in paris\"\n",
-      "    ]\n",
-      "}\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:18:05 INFO semantic_router.utils.logger Generated config: {'name': 'get_time', 'utterances': ['what is the time in new york', 'can you tell me the time in london', 'get me the current time in tokyo', 'i need to know the time in sydney', 'please tell me the current time in paris']}\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:18:05 INFO semantic_router.utils.logger Generating config...\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:18:05 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
-      "\u001b[31m2023-12-18 12:18:07 ERROR semantic_router.utils.logger Fall back to OpenAI failed with error ('Failed to call HuggingFace API', '{\"error\":\"Bad Gateway\"}')\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:18:07 INFO semantic_router.utils.logger Calling gpt-4 model\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:18:12 INFO semantic_router.utils.logger AI message: {\n",
-      "    \"name\": \"get_news\",\n",
-      "    \"utterances\": [\n",
-      "        \"Can I get the latest news in Canada?\",\n",
-      "        \"Show me the recent news in the US\",\n",
-      "        \"I would like to know about the sports news in England\",\n",
-      "        \"Let's check the technology news in Japan\",\n",
-      "        \"Show me the health related news in Germany\"\n",
-      "    ]\n",
-      "}\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:18:12 INFO semantic_router.utils.logger Generated config: {'name': 'get_news', 'utterances': ['Can I get the latest news in Canada?', 'Show me the recent news in the US', 'I would like to know about the sports news in England', \"Let's check the technology news in Japan\", 'Show me the health related news in Germany']}\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:18:12 INFO semantic_router.utils.logger Creating route layer...\u001b[0m\n"
+      "\u001b[32m2023-12-18 14:47:47 INFO semantic_router.utils.logger Generating config...\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:47 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:50 INFO semantic_router.utils.logger AI message: \n",
+      "    Example output:\n",
+      "    {\n",
+      "        \"name\": \"get_time\",\n",
+      "        \"utterances\": [\n",
+      "            \"What's the time in New York?\",\n",
+      "            \"Tell me the time in Tokyo.\",\n",
+      "            \"Can you give me the time in London?\",\n",
+      "            \"What's the current time in Sydney?\",\n",
+      "            \"Can you tell me the time in Berlin?\"\n",
+      "        ]\n",
+      "    }\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:50 INFO semantic_router.utils.logger Generated config: {'name': 'get_time', 'utterances': [\"What's the time in New York?\", 'Tell me the time in Tokyo.', 'Can you give me the time in London?', \"What's the current time in Sydney?\", 'Can you tell me the time in Berlin?']}\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:50 INFO semantic_router.utils.logger Generating config...\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:50 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:54 INFO semantic_router.utils.logger AI message: \n",
+      "    Example output:\n",
+      "    {\n",
+      "        \"name\": \"get_news\",\n",
+      "        \"utterances\": [\n",
+      "            \"Tell me the latest news from the US\",\n",
+      "            \"What's happening in India today?\",\n",
+      "            \"Get me the top stories from Japan\",\n",
+      "            \"Can you give me the breaking news from Brazil?\",\n",
+      "            \"What's the latest news from Germany?\"\n",
+      "        ]\n",
+      "    }\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:54 INFO semantic_router.utils.logger Generated config: {'name': 'get_news', 'utterances': ['Tell me the latest news from the US', \"What's happening in India today?\", 'Get me the top stories from Japan', 'Can you give me the breaking news from Brazil?', \"What's the latest news from Germany?\"]}\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:54 INFO semantic_router.utils.logger Creating route layer...\u001b[0m\n"
      ]
     },
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Route: {'name': 'get_time', 'utterances': ['what is the time in new york', 'can you tell me the time in london', 'get me the current time in tokyo', 'i need to know the time in sydney', 'please tell me the current time in paris']}\n",
-      "Route: {'name': 'get_news', 'utterances': ['Can I get the latest news in Canada?', 'Show me the recent news in the US', 'I would like to know about the sports news in England', \"Let's check the technology news in Japan\", 'Show me the health related news in Germany']}\n"
+      "Route: {'name': 'get_time', 'utterances': [\"What's the time in New York?\", 'Tell me the time in Tokyo.', 'Can you give me the time in London?', \"What's the current time in Sydney?\", 'Can you tell me the time in Berlin?']}\n",
+      "Route: {'name': 'get_news', 'utterances': ['Tell me the latest news from the US', \"What's happening in India today?\", 'Get me the top stories from Japan', 'Can you give me the breaking news from Brazil?', \"What's the latest news from Germany?\"]}\n"
      ]
     }
    ],
@@ -466,16 +475,16 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 27,
+   "execution_count": 11,
    "metadata": {},
    "outputs": [
     {
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "\u001b[32m2023-12-18 12:20:12 INFO semantic_router.utils.logger Generating config...\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:12 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:16 INFO semantic_router.utils.logger AI message: \n",
+      "\u001b[32m2023-12-18 14:47:55 INFO semantic_router.utils.logger Generating config...\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:55 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:58 INFO semantic_router.utils.logger AI message: \n",
       "    Example output:\n",
       "    {\n",
       "        \"name\": \"get_time\",\n",
@@ -487,10 +496,10 @@
       "            \"Can you tell me the time in Berlin?\"\n",
       "        ]\n",
       "    }\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:16 INFO semantic_router.utils.logger Generated config: {'name': 'get_time', 'utterances': [\"What's the time in New York?\", 'Tell me the time in Tokyo.', 'Can you give me the time in London?', \"What's the current time in Sydney?\", 'Can you tell me the time in Berlin?']}\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:16 INFO semantic_router.utils.logger Generating config...\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:16 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:20 INFO semantic_router.utils.logger AI message: \n",
+      "\u001b[32m2023-12-18 14:47:58 INFO semantic_router.utils.logger Generated config: {'name': 'get_time', 'utterances': [\"What's the time in New York?\", 'Tell me the time in Tokyo.', 'Can you give me the time in London?', \"What's the current time in Sydney?\", 'Can you tell me the time in Berlin?']}\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:58 INFO semantic_router.utils.logger Generating config...\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:47:58 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:48:02 INFO semantic_router.utils.logger AI message: \n",
       "    Example output:\n",
       "    {\n",
       "        \"name\": \"get_news\",\n",
@@ -502,8 +511,8 @@
       "            \"What's the latest news from Germany?\"\n",
       "        ]\n",
       "    }\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:20 INFO semantic_router.utils.logger Generated config: {'name': 'get_news', 'utterances': ['Tell me the latest news from the US', \"What's happening in India today?\", 'Get me the top stories from Japan', 'Can you give me the breaking news from Brazil?', \"What's the latest news from Germany?\"]}\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:20 INFO semantic_router.utils.logger Creating route layer...\u001b[0m\n"
+      "\u001b[32m2023-12-18 14:48:02 INFO semantic_router.utils.logger Generated config: {'name': 'get_news', 'utterances': ['Tell me the latest news from the US', \"What's happening in India today?\", 'Get me the top stories from Japan', 'Can you give me the breaking news from Brazil?', \"What's the latest news from Germany?\"]}\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:48:02 INFO semantic_router.utils.logger Creating route layer...\u001b[0m\n"
      ]
     },
     {
@@ -543,20 +552,20 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 26,
+   "execution_count": 12,
    "metadata": {},
    "outputs": [
     {
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "\u001b[32m2023-12-18 12:20:02 INFO semantic_router.utils.logger Extracting parameters...\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:02 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:04 INFO semantic_router.utils.logger AI message: \n",
+      "\u001b[32m2023-12-18 14:48:02 INFO semantic_router.utils.logger Extracting parameters...\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:48:02 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:48:04 INFO semantic_router.utils.logger AI message: \n",
       "    {\n",
       "        \"location\": \"Stockholm\"\n",
       "    }\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:04 INFO semantic_router.utils.logger Extracted parameters: {'location': 'Stockholm'}\u001b[0m\n"
+      "\u001b[32m2023-12-18 14:48:04 INFO semantic_router.utils.logger Extracted parameters: {'location': 'Stockholm'}\u001b[0m\n"
      ]
     },
     {
@@ -571,14 +580,14 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "\u001b[32m2023-12-18 12:20:04 INFO semantic_router.utils.logger Extracting parameters...\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:04 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:05 INFO semantic_router.utils.logger AI message: \n",
+      "\u001b[32m2023-12-18 14:48:04 INFO semantic_router.utils.logger Extracting parameters...\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:48:04 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:48:05 INFO semantic_router.utils.logger AI message: \n",
       "    {\n",
       "        \"category\": \"tech\",\n",
       "        \"country\": \"Lithuania\"\n",
       "    }\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:05 INFO semantic_router.utils.logger Extracted parameters: {'category': 'tech', 'country': 'Lithuania'}\u001b[0m\n"
+      "\u001b[32m2023-12-18 14:48:05 INFO semantic_router.utils.logger Extracted parameters: {'category': 'tech', 'country': 'Lithuania'}\u001b[0m\n"
      ]
     },
     {
@@ -593,9 +602,9 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "\u001b[33m2023-12-18 12:20:05 WARNING semantic_router.utils.logger No function found\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:05 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
-      "\u001b[32m2023-12-18 12:20:06 INFO semantic_router.utils.logger AI message:  How can I help you today?\u001b[0m\n"
+      "\u001b[33m2023-12-18 14:48:05 WARNING semantic_router.utils.logger No function found\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:48:05 INFO semantic_router.utils.logger Calling Mistral model\u001b[0m\n",
+      "\u001b[32m2023-12-18 14:48:06 INFO semantic_router.utils.logger AI message:  How can I help you today?\u001b[0m\n"
      ]
     },
     {
@@ -604,7 +613,7 @@
        "' How can I help you today?'"
       ]
      },
-     "execution_count": 26,
+     "execution_count": 12,
      "metadata": {},
      "output_type": "execute_result"
     }
diff --git a/docs/examples/hybrid-layer.ipynb b/docs/examples/hybrid-layer.ipynb
index 8b1da5ae..5d0cb452 100644
--- a/docs/examples/hybrid-layer.ipynb
+++ b/docs/examples/hybrid-layer.ipynb
@@ -34,7 +34,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!pip install -qU semantic-router==0.0.6"
+    "!pip install -qU semantic-router==0.0.11"
    ]
   },
   {
@@ -46,21 +46,9 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {},
-   "outputs": [
-    {
-     "ename": "ImportError",
-     "evalue": "cannot import name 'Route' from 'semantic_router.schema' (/Users/jakit/customers/aurelio/semantic-router/.venv/lib/python3.11/site-packages/semantic_router/schema.py)",
-     "output_type": "error",
-     "traceback": [
-      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-      "\u001b[0;31mImportError\u001b[0m                               Traceback (most recent call last)",
-      "\u001b[1;32m/Users/jakit/customers/aurelio/semantic-router/docs/examples/hybrid-layer.ipynb Cell 7\u001b[0m line \u001b[0;36m1\n\u001b[0;32m----> <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/hybrid-layer.ipynb#X10sZmlsZQ%3D%3D?line=0'>1</a>\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39msemantic_router\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mschema\u001b[39;00m \u001b[39mimport\u001b[39;00m Route\n\u001b[1;32m      <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/hybrid-layer.ipynb#X10sZmlsZQ%3D%3D?line=2'>3</a>\u001b[0m politics \u001b[39m=\u001b[39m Route(\n\u001b[1;32m      <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/hybrid-layer.ipynb#X10sZmlsZQ%3D%3D?line=3'>4</a>\u001b[0m     name\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mpolitics\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[1;32m      <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/hybrid-layer.ipynb#X10sZmlsZQ%3D%3D?line=4'>5</a>\u001b[0m     utterances\u001b[39m=\u001b[39m[\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m     <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/hybrid-layer.ipynb#X10sZmlsZQ%3D%3D?line=11'>12</a>\u001b[0m     ],\n\u001b[1;32m     <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/hybrid-layer.ipynb#X10sZmlsZQ%3D%3D?line=12'>13</a>\u001b[0m )\n",
-      "\u001b[0;31mImportError\u001b[0m: cannot import name 'Route' from 'semantic_router.schema' (/Users/jakit/customers/aurelio/semantic-router/.venv/lib/python3.11/site-packages/semantic_router/schema.py)"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
    "source": [
     "from semantic_router.schema import Route\n",
     "\n",
@@ -84,6 +72,13 @@
     "Let's define another for good measure:"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
   {
    "cell_type": "code",
    "execution_count": null,
diff --git a/semantic_router/hybrid_layer.py b/semantic_router/hybrid_layer.py
index dec6336e..9f353e94 100644
--- a/semantic_router/hybrid_layer.py
+++ b/semantic_router/hybrid_layer.py
@@ -1,6 +1,5 @@
 import numpy as np
 from numpy.linalg import norm
-from tqdm.auto import tqdm
 
 from semantic_router.encoders import (
     BaseEncoder,
@@ -34,8 +33,9 @@ class HybridRouteLayer:
         # if routes list has been passed, we initialize index now
         if routes:
             # initialize index now
-            for route in tqdm(routes):
-                self._add_route(route=route)
+            # for route in tqdm(routes):
+            #     self._add_route(route=route)
+            self._add_routes(routes)
 
     def __call__(self, text: str) -> str | None:
         results = self._query(text)
@@ -77,6 +77,38 @@ class HybridRouteLayer:
         else:
             self.sparse_index = np.concatenate([self.sparse_index, sparse_embeds])
 
+    def _add_routes(self, routes: list[Route]):
+        # create embeddings for all routes
+        logger.info("Creating embeddings for all routes...")
+        all_utterances = [
+            utterance for route in routes for utterance in route.utterances
+        ]
+        dense_embeds = np.array(self.encoder(all_utterances))
+        sparse_embeds = np.array(self.sparse_encoder(all_utterances))
+
+        # create route array
+        route_names = [route.name for route in routes for _ in route.utterances]
+        route_array = np.array(route_names)
+        self.categories = (
+            np.concatenate([self.categories, route_array])
+            if self.categories is not None
+            else route_array
+        )
+
+        # create utterance array (the dense index)
+        self.index = (
+            np.concatenate([self.index, dense_embeds])
+            if self.index is not None
+            else dense_embeds
+        )
+
+        # create sparse utterance array
+        self.sparse_index = (
+            np.concatenate([self.sparse_index, sparse_embeds])
+            if self.sparse_index is not None
+            else sparse_embeds
+        )
+
     def _query(self, text: str, top_k: int = 5):
         """Given some text, encodes and searches the index vector space to
         retrieve the top_k most similar records.
diff --git a/semantic_router/utils/logger.py b/semantic_router/utils/logger.py
index a001623a..00c83693 100644
--- a/semantic_router/utils/logger.py
+++ b/semantic_router/utils/logger.py
@@ -22,18 +22,9 @@ class CustomFormatter(colorlog.ColoredFormatter):
 
 def add_coloured_handler(logger):
     formatter = CustomFormatter()
-
     console_handler = logging.StreamHandler()
     console_handler.setFormatter(formatter)
-
-    logging.basicConfig(
-        datefmt="%Y-%m-%d %H:%M:%S",
-        format="%(log_color)s%(asctime)s %(levelname)s %(name)s %(message)s",
-        force=True,
-    )
-
     logger.addHandler(console_handler)
-
     return logger
 
 
diff --git a/tests/unit/test_hybrid_layer.py b/tests/unit/test_hybrid_layer.py
index 94720cd8..c00f887d 100644
--- a/tests/unit/test_hybrid_layer.py
+++ b/tests/unit/test_hybrid_layer.py
@@ -60,7 +60,7 @@ class TestHybridRouteLayer:
     def test_add_route(self, openai_encoder):
         route_layer = HybridRouteLayer(encoder=openai_encoder)
         route = Route(name="Route 3", utterances=["Yes", "No"])
-        route_layer.add(route)
+        route_layer._add_routes([route])
         assert route_layer.index is not None and route_layer.categories is not None
         assert len(route_layer.index) == 2
         assert len(set(route_layer.categories)) == 1
-- 
GitLab