diff --git a/docs/examples/function_calling.ipynb b/docs/examples/function_calling.ipynb
index b45e1d0dd44f87e6c6383e8d9574eb11a1e15084..deb1c32f18e9c4509074d0156cd8f891c9ce3745 100644
--- a/docs/examples/function_calling.ipynb
+++ b/docs/examples/function_calling.ipynb
@@ -46,8 +46,8 @@
       "/Users/jakit/customers/aurelio/semantic-router/.venv/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
       "  from .autonotebook import tqdm as notebook_tqdm\n",
       "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n",
-      "\u001b[32m2023-12-19 12:30:53 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
-      "\u001b[32m2023-12-19 12:30:58 INFO semantic_router.utils.logger Generated route config:\n",
+      "\u001b[32m2023-12-19 16:06:38 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
+      "\u001b[32m2023-12-19 16:06:44 INFO semantic_router.utils.logger Generated route config:\n",
       "{\n",
       "    \"name\": \"get_time\",\n",
       "    \"utterances\": [\n",
@@ -58,8 +58,8 @@
       "        \"Can you give me the time in [location]?\"\n",
       "    ]\n",
       "}\u001b[0m\n",
-      "\u001b[32m2023-12-19 12:30:58 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
-      "\u001b[32m2023-12-19 12:31:03 INFO semantic_router.utils.logger Generated route config:\n",
+      "\u001b[32m2023-12-19 16:06:44 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
+      "\u001b[32m2023-12-19 16:06:50 INFO semantic_router.utils.logger Generated route config:\n",
       "{\n",
       "    \"name\": \"get_news\",\n",
       "    \"utterances\": [\n",
@@ -74,7 +74,7 @@
     }
    ],
    "source": [
-    "from semantic_router import Route, RouteConfig\n",
+    "from semantic_router.route import Route, RouteConfig\n",
     "\n",
     "functions = [get_time, get_news]\n",
     "routes = []\n",
@@ -88,35 +88,37 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 4,
    "metadata": {},
    "outputs": [
     {
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "\u001b[32m2023-12-19 12:31:03 INFO semantic_router.utils.logger Added route `get_weather`\u001b[0m\n",
-      "\u001b[32m2023-12-19 12:31:03 INFO semantic_router.utils.logger Removed route `get_time`\u001b[0m\n"
+      "\u001b[32m2023-12-19 16:07:10 INFO semantic_router.utils.logger Added route `get_weather`\u001b[0m\n",
+      "\u001b[32m2023-12-19 16:07:10 INFO semantic_router.utils.logger Removed route `get_weather`\u001b[0m\n"
      ]
     },
     {
      "data": {
       "text/plain": [
-       "[{'name': 'get_news',\n",
+       "[{'name': 'get_time',\n",
+       "  'utterances': [\"What's the time in [location]?\",\n",
+       "   'Can you tell me the time in [location]?',\n",
+       "   'I need to know the time in [location].',\n",
+       "   'What time is it in [location]?',\n",
+       "   'Can you give me the time in [location]?'],\n",
+       "  'description': None},\n",
+       " {'name': 'get_news',\n",
        "  'utterances': ['Tell me the latest news from the US',\n",
        "   \"What's happening in India today?\",\n",
        "   'Get me the top stories from Japan',\n",
        "   'Can you give me the breaking news from Brazil?',\n",
        "   \"What's the latest in Germany?\"],\n",
-       "  'description': None},\n",
-       " {'name': 'get_weather',\n",
-       "  'utterances': ['what is the weather in SF',\n",
-       "   'what is the current temperature in London?',\n",
-       "   \"tomorrow's weather in Paris?\"],\n",
        "  'description': None}]"
       ]
      },
-     "execution_count": 3,
+     "execution_count": 4,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -134,30 +136,30 @@
     ")\n",
     "route_config.add(get_weather_route)\n",
     "\n",
-    "route_config.remove(\"get_time\")\n",
+    "route_config.remove(\"get_weather\")\n",
     "\n",
     "route_config.to_dict()"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "Route(name='get_weather', utterances=['what is the weather in SF', 'what is the current temperature in London?', \"tomorrow's weather in Paris?\"], description=None)"
+       "Route(name='get_time', utterances=[\"What's the time in [location]?\", 'Can you tell me the time in [location]?', 'I need to know the time in [location].', 'What time is it in [location]?', 'Can you give me the time in [location]?'], description=None)"
       ]
      },
-     "execution_count": 4,
+     "execution_count": 5,
      "metadata": {},
      "output_type": "execute_result"
     }
    ],
    "source": [
     "# Get a route by name\n",
-    "route_config.get(\"get_weather\")"
+    "route_config.get(\"get_time\")"
    ]
   },
   {
@@ -176,7 +178,7 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "\u001b[32m2023-12-19 12:31:50 INFO semantic_router.utils.logger Saving route config to route_config.json\u001b[0m\n"
+      "\u001b[32m2023-12-19 16:04:24 INFO semantic_router.utils.logger Saving route config to route_config.json\u001b[0m\n"
      ]
     }
    ],
@@ -200,7 +202,7 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "\u001b[32m2023-12-19 12:32:24 INFO semantic_router.utils.logger Loading route config from route_config.json\u001b[0m\n"
+      "\u001b[32m2023-12-19 16:07:16 INFO semantic_router.utils.logger Loading route config from route_config.json\u001b[0m\n"
      ]
     }
    ],
@@ -217,7 +219,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -226,184 +228,75 @@
     "route_layer = RouteLayer(routes=route_config.routes)"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def validate_parameters(function, parameters):\n",
-    "    sig = inspect.signature(function)\n",
-    "    for name, param in sig.parameters.items():\n",
-    "        if name not in parameters:\n",
-    "            return False, f\"Parameter {name} missing from query\"\n",
-    "        if not isinstance(parameters[name], param.annotation):\n",
-    "            return False, f\"Parameter {name} is not of type {param.annotation}\"\n",
-    "    return True, \"Parameters are valid\""
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def extract_parameters(query: str, function) -> dict:\n",
-    "    logger.info(\"Extracting parameters...\")\n",
-    "    example_query = \"How is the weather in Hawaii right now in International units?\"\n",
-    "\n",
-    "    example_schema = {\n",
-    "        \"name\": \"get_weather\",\n",
-    "        \"description\": \"Useful to get the weather in a specific location\",\n",
-    "        \"signature\": \"(location: str, degree: str) -> str\",\n",
-    "        \"output\": \"<class 'str'>\",\n",
-    "    }\n",
-    "\n",
-    "    example_parameters = {\n",
-    "        \"location\": \"London\",\n",
-    "        \"degree\": \"Celsius\",\n",
-    "    }\n",
-    "\n",
-    "    prompt = f\"\"\"\n",
-    "    You are a helpful assistant designed to output JSON.\n",
-    "    Given the following function schema\n",
-    "    << {get_function_schema(function)} >>\n",
-    "    and query\n",
-    "    << {query} >>\n",
-    "    extract the parameters values from the query, in a valid JSON format.\n",
-    "    Example:\n",
-    "    Input:\n",
-    "    query: {example_query}\n",
-    "    schema: {example_schema}\n",
-    "\n",
-    "    Result: {example_parameters}\n",
-    "\n",
-    "    Input:\n",
-    "    query: {query}\n",
-    "    schema: {get_function_schema(function)}\n",
-    "    Result:\n",
-    "    \"\"\"\n",
-    "\n",
-    "    try:\n",
-    "        ai_message = llm_mistral(prompt)\n",
-    "        ai_message = (\n",
-    "            ai_message.replace(\"Output:\", \"\").replace(\"'\", '\"').strip().rstrip(\",\")\n",
-    "        )\n",
-    "    except Exception as e:\n",
-    "        logger.error(f\"Mistral failed with error {e}, falling back to OpenAI\")\n",
-    "        ai_message = llm_openai(prompt)\n",
-    "\n",
-    "    try:\n",
-    "        parameters = json.loads(ai_message)\n",
-    "        valid, message = validate_parameters(function, parameters)\n",
-    "\n",
-    "        if not valid:\n",
-    "            logger.warning(\n",
-    "                f\"Invalid parameters from Mistral, falling back to OpenAI: {message}\"\n",
-    "            )\n",
-    "            # Fall back to OpenAI\n",
-    "            ai_message = llm_openai(prompt)\n",
-    "            parameters = json.loads(ai_message)\n",
-    "            valid, message = validate_parameters(function, parameters)\n",
-    "            if not valid:\n",
-    "                raise ValueError(message)\n",
-    "\n",
-    "        logger.info(f\"Extracted parameters: {parameters}\")\n",
-    "        return parameters\n",
-    "    except ValueError as e:\n",
-    "        logger.error(f\"Parameter validation error: {str(e)}\")\n",
-    "        return {\"error\": \"Failed to validate parameters\"}"
-   ]
-  },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Set up calling functions"
+    "Do a function call with functions as tool"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 9,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "\u001b[32m2023-12-19 16:07:25 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Calling function: get_time\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "\u001b[31m2023-12-19 16:07:27 ERROR semantic_router.utils.logger Input name missing from query\u001b[0m\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Inputs: {'location': 'Stockholm'}\n",
+      "Schema: {'name': 'get_time', 'description': 'Useful to get the time in a specific location', 'signature': '(location: str) -> str', 'output': \"<class 'str'>\"}\n"
+     ]
+    },
+    {
+     "ename": "ValueError",
+     "evalue": "Invalid inputs",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mValueError\u001b[0m                                Traceback (most recent call last)",
+      "\u001b[1;32m/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb Cell 14\u001b[0m line \u001b[0;36m5\n\u001b[1;32m      <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=0'>1</a>\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39msemantic_router\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mutils\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mfunction_call\u001b[39;00m \u001b[39mimport\u001b[39;00m route_and_execute\n\u001b[1;32m      <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=2'>3</a>\u001b[0m tools \u001b[39m=\u001b[39m [get_time, get_news]\n\u001b[0;32m----> <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=4'>5</a>\u001b[0m \u001b[39mawait\u001b[39;00m route_and_execute(\n\u001b[1;32m      <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=5'>6</a>\u001b[0m     query\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mWhat is the time in Stockholm?\u001b[39m\u001b[39m\"\u001b[39m, functions\u001b[39m=\u001b[39mtools, route_layer\u001b[39m=\u001b[39mroute_layer\n\u001b[1;32m      <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=6'>7</a>\u001b[0m )\n\u001b[1;32m      <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=7'>8</a>\u001b[0m \u001b[39mawait\u001b[39;00m route_and_execute(\n\u001b[1;32m      <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=8'>9</a>\u001b[0m     query\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mWhat is the tech news in the Lithuania?\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[1;32m     <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=9'>10</a>\u001b[0m     functions\u001b[39m=\u001b[39mtools,\n\u001b[1;32m     <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=10'>11</a>\u001b[0m     route_layer\u001b[39m=\u001b[39mroute_layer,\n\u001b[1;32m     <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=11'>12</a>\u001b[0m )\n\u001b[1;32m     <a href='vscode-notebook-cell:/Users/jakit/customers/aurelio/semantic-router/docs/examples/function_calling.ipynb#Y115sZmlsZQ%3D%3D?line=12'>13</a>\u001b[0m \u001b[39mawait\u001b[39;00m route_and_execute(query\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mHi!\u001b[39m\u001b[39m\"\u001b[39m, functions\u001b[39m=\u001b[39mtools, route_layer\u001b[39m=\u001b[39mroute_layer)\n",
+      "File \u001b[0;32m~/customers/aurelio/semantic-router/semantic_router/utils/function_call.py:125\u001b[0m, in \u001b[0;36mroute_and_execute\u001b[0;34m(query, functions, route_layer)\u001b[0m\n\u001b[1;32m    123\u001b[0m \u001b[39mprint\u001b[39m(\u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mCalling function: \u001b[39m\u001b[39m{\u001b[39;00mfunction\u001b[39m.\u001b[39m\u001b[39m__name__\u001b[39m\u001b[39m}\u001b[39;00m\u001b[39m\"\u001b[39m)\n\u001b[1;32m    124\u001b[0m schema \u001b[39m=\u001b[39m get_schema(function)\n\u001b[0;32m--> 125\u001b[0m inputs \u001b[39m=\u001b[39m \u001b[39mawait\u001b[39;00m extract_function_inputs(query, schema)\n\u001b[1;32m    126\u001b[0m call_function(function, inputs)\n",
+      "File \u001b[0;32m~/customers/aurelio/semantic-router/semantic_router/utils/function_call.py:83\u001b[0m, in \u001b[0;36mextract_function_inputs\u001b[0;34m(query, function_schema)\u001b[0m\n\u001b[1;32m     81\u001b[0m function_inputs \u001b[39m=\u001b[39m json\u001b[39m.\u001b[39mloads(output)\n\u001b[1;32m     82\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m is_valid_inputs(function_inputs, function_schema):\n\u001b[0;32m---> 83\u001b[0m     \u001b[39mraise\u001b[39;00m \u001b[39mValueError\u001b[39;00m(\u001b[39m\"\u001b[39m\u001b[39mInvalid inputs\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[1;32m     84\u001b[0m \u001b[39mreturn\u001b[39;00m function_inputs\n",
+      "\u001b[0;31mValueError\u001b[0m: Invalid inputs"
+     ]
+    }
+   ],
    "source": [
-    "from typing import Callable\n",
-    "from semantic_router.layer import RouteLayer\n",
-    "\n",
-    "\n",
-    "def call_function(function: Callable, parameters: dict[str, str]):\n",
-    "    try:\n",
-    "        return function(**parameters)\n",
-    "    except TypeError as e:\n",
-    "        logger.error(f\"Error calling function: {e}\")\n",
-    "\n",
-    "\n",
-    "def call_llm(query: str) -> str:\n",
-    "    try:\n",
-    "        ai_message = llm_mistral(query)\n",
-    "    except Exception as e:\n",
-    "        logger.error(f\"Mistral failed with error {e}, falling back to OpenAI\")\n",
-    "        ai_message = llm_openai(query)\n",
-    "\n",
-    "    return ai_message\n",
+    "from semantic_router.utils.function_call import route_and_execute\n",
     "\n",
-    "\n",
-    "def call(query: str, functions: list[Callable], router: RouteLayer):\n",
-    "    function_name = router(query)\n",
-    "    if not function_name:\n",
-    "        logger.warning(\"No function found\")\n",
-    "        return call_llm(query)\n",
-    "\n",
-    "    for function in functions:\n",
-    "        if function.__name__ == function_name:\n",
-    "            parameters = extract_parameters(query, function)\n",
-    "            print(f\"parameters: {parameters}\")\n",
-    "            return call_function(function, parameters)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Workflow"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Functions as a tool"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Loading configuration from file\n",
-    "router = RouteLayer.from_json(\"router.json\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
     "tools = [get_time, get_news]\n",
     "\n",
-    "call(query=\"What is the time in Stockholm?\", functions=tools, router=router)\n",
-    "call(query=\"What is the tech news in the Lithuania?\", functions=tools, router=router)\n",
-    "call(query=\"Hi!\", functions=tools, router=router)"
+    "await route_and_execute(\n",
+    "    query=\"What is the time in Stockholm?\", functions=tools, route_layer=route_layer\n",
+    ")\n",
+    "await route_and_execute(\n",
+    "    query=\"What is the tech news in the Lithuania?\",\n",
+    "    functions=tools,\n",
+    "    route_layer=route_layer,\n",
+    ")\n",
+    "await route_and_execute(query=\"Hi!\", functions=tools, route_layer=route_layer)"
    ]
   },
   {
diff --git a/docs/examples/route_config.json b/docs/examples/route_config.json
index d43df43fe78fd98b1b4d65f6c0d1d78ac7b72816..0a02d850653613f2fd4f2a7fc6ca8b13b9f9ae86 100644
--- a/docs/examples/route_config.json
+++ b/docs/examples/route_config.json
@@ -1 +1 @@
-[{"name": "get_news", "utterances": ["Tell me the latest news from the US", "What's happening in India today?", "Get me the top stories from Japan", "Can you give me the breaking news from Brazil?", "What's the latest in Germany?"], "description": null}, {"name": "get_weather", "utterances": ["what is the weather in SF", "what is the current temperature in London?", "tomorrow's weather in Paris?"], "description": null}]
+[{"name": "get_time", "utterances": ["What's the time in [location]?", "Can you tell me the time in [location]?", "I need to know the time in [location].", "What time is it in [location]?", "Can you give me the time in [location]?"], "description": null}, {"name": "get_news", "utterances": ["Tell me the latest news from the US", "What's happening in India today?", "Get me the top stories from Japan", "Can you give me the breaking news from Brazil?", "What's the latest news from Germany?"], "description": null}]
diff --git a/pyproject.toml b/pyproject.toml
index 32cb1fe35b645e89c11421ba0c4f91556f7e8428..b530d476d439a1e7e55e64be50666b370f82d7b0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -10,6 +10,7 @@ authors = [
     "Bogdan Buduroiu <bogdan@aurelio.ai>"
 ]
 readme = "README.md"
+packages = [{include = "semantic_router"}]
 
 [tool.poetry.dependencies]
 python = "^3.9"
diff --git a/semantic_router/__init__.py b/semantic_router/__init__.py
index 2659bfe3bf4cebe2b022c01ec7139658aeb43eb1..0c445bea3ff4efd8f3aa8950e2c772277d93b20c 100644
--- a/semantic_router/__init__.py
+++ b/semantic_router/__init__.py
@@ -1,5 +1,4 @@
 from .hybrid_layer import HybridRouteLayer
 from .layer import RouteLayer
-from .route import Route, RouteConfig
 
-__all__ = ["RouteLayer", "HybridRouteLayer", "Route", "RouteConfig"]
+__all__ = ["RouteLayer", "HybridRouteLayer"]
diff --git a/semantic_router/hybrid_layer.py b/semantic_router/hybrid_layer.py
index a257e8018fa71dc5c285c564657b91f42aba0de8..ff75369529fd0b116ef7900b3be1463934cdb460 100644
--- a/semantic_router/hybrid_layer.py
+++ b/semantic_router/hybrid_layer.py
@@ -2,7 +2,6 @@ import numpy as np
 from numpy.linalg import norm
 from tqdm.auto import tqdm
 
-from semantic_router import Route
 from semantic_router.encoders import (
     BaseEncoder,
     BM25Encoder,
@@ -11,6 +10,8 @@ from semantic_router.encoders import (
 )
 from semantic_router.utils.logger import logger
 
+from .route import Route
+
 
 class HybridRouteLayer:
     index = None
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 72de990081ab09e346647b03d3dd3b9b5379ba94..c0670b916eed760ff524a049c288e766da03796d 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -3,7 +3,6 @@ import json
 import numpy as np
 import yaml
 
-from semantic_router import Route
 from semantic_router.encoders import (
     BaseEncoder,
     CohereEncoder,
@@ -12,6 +11,8 @@ from semantic_router.encoders import (
 from semantic_router.linear import similarity_matrix, top_scores
 from semantic_router.utils.logger import logger
 
+from .route import Route
+
 
 class RouteLayer:
     index = None
diff --git a/semantic_router/route.py b/semantic_router/route.py
index b1fb1e47655d929644d6d136c4e8f4678de7ff5b..c1ec8fc360080d41bdf69a61d3acd8a7a5fcb79e 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -1,13 +1,13 @@
-import inspect
 import json
 import os
 import re
 from typing import Any, Callable, Union
 
-import openai
 import yaml
 from pydantic import BaseModel
 
+from semantic_router.utils import function_call
+from semantic_router.utils.llm import llm
 from semantic_router.utils.logger import logger
 
 
@@ -59,42 +59,10 @@ class Route(BaseModel):
         """
         Generate a dynamic Route object from a function or Pydantic model using LLM
         """
-        schema = cls._get_schema(item=entity)
+        schema = function_call.get_schema(item=entity)
         dynamic_route = await cls._agenerate_dynamic_route(function_schema=schema)
         return dynamic_route
 
-    @classmethod
-    def _get_schema(cls, item: Union[BaseModel, Callable]) -> dict[str, Any]:
-        if isinstance(item, BaseModel):
-            signature_parts = []
-            for field_name, field_model in item.__annotations__.items():
-                field_info = item.__fields__[field_name]
-                default_value = field_info.default
-
-                if default_value:
-                    default_repr = repr(default_value)
-                    signature_part = (
-                        f"{field_name}: {field_model.__name__} = {default_repr}"
-                    )
-                else:
-                    signature_part = f"{field_name}: {field_model.__name__}"
-
-                signature_parts.append(signature_part)
-            signature = f"({', '.join(signature_parts)}) -> str"
-            schema = {
-                "name": item.__class__.__name__,
-                "description": item.__doc__,
-                "signature": signature,
-            }
-        else:
-            schema = {
-                "name": item.__name__,
-                "description": str(inspect.getdoc(item)),
-                "signature": str(inspect.signature(item)),
-                "output": str(inspect.signature(item).return_annotation),
-            }
-        return schema
-
     @classmethod
     def _parse_route_config(cls, config: str) -> str:
         # Regular expression to match content inside <config></config>
@@ -136,26 +104,10 @@ class Route(BaseModel):
         {function_schema}
         """
 
-        client = openai.AsyncOpenAI(
-            base_url="https://openrouter.ai/api/v1",
-            api_key=os.getenv("OPENROUTER_API_KEY"),
-        )
-
-        completion = await client.chat.completions.create(
-            model="mistralai/mistral-7b-instruct",
-            messages=[
-                {
-                    "role": "user",
-                    "content": prompt,
-                },
-            ],
-            temperature=0.01,
-            max_tokens=200,
-        )
-
-        output = completion.choices[0].message.content
+        output = await llm(prompt)
         if not output:
-            raise Exception("No output generated")
+            raise Exception("No output generated for dynamic route")
+
         route_config = cls._parse_route_config(config=output)
 
         logger.info(f"Generated route config:\n{route_config}")
diff --git a/semantic_router/utils/function_call.py b/semantic_router/utils/function_call.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c0c9a42c9e581eba151984786ff487e3c7fa918
--- /dev/null
+++ b/semantic_router/utils/function_call.py
@@ -0,0 +1,135 @@
+import inspect
+import json
+from typing import Any, Callable, Union
+
+from pydantic import BaseModel
+
+from semantic_router.utils.llm import llm
+from semantic_router.utils.logger import logger
+
+
+def get_schema(item: Union[BaseModel, Callable]) -> dict[str, Any]:
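+    """Build a schema (name, description, signature) from a function or Pydantic model."""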
+    if isinstance(item, BaseModel):
+        signature_parts = []
+        for field_name, field_model in item.__annotations__.items():
+            field_info = item.__fields__[field_name]
+            default_value = field_info.default
+
+            if default_value:
+                default_repr = repr(default_value)
+                signature_part = (
+                    f"{field_name}: {field_model.__name__} = {default_repr}"
+                )
+            else:
+                signature_part = f"{field_name}: {field_model.__name__}"
+
+            signature_parts.append(signature_part)
+        signature = f"({', '.join(signature_parts)}) -> str"
+        schema = {
+            "name": item.__class__.__name__,
+            "description": item.__doc__,
+            "signature": signature,
+        }
+    else:
+        schema = {
+            "name": item.__name__,
+            "description": str(inspect.getdoc(item)),
+            "signature": str(inspect.signature(item)),
+            "output": str(inspect.signature(item).return_annotation),
+        }
+    return schema
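+
+# Example: for `def get_time(location: str) -> str`, get_schema(get_time) returns
+# {"name": "get_time", "description": "Useful to get the time in a specific location",
+#  "signature": "(location: str) -> str", "output": "<class 'str'>"}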
+
+
+async def extract_function_inputs(query: str, function_schema: dict[str, Any]) -> dict:
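+    """Use the LLM to extract the function's input values from the query, returned as a dict."""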
+    logger.info("Extracting function input...")
+
+    prompt = f"""
+    You are a helpful assistant designed to output JSON.
+    Given the following function schema
+    << {function_schema} >>
+    and query
+    << {query} >>
+    extract the parameter values from the query, in a valid JSON format.
+    Example:
+    Input:
+    query: "How is the weather in Hawaii right now in International units?"
+    schema:
+    {{
+        "name": "get_weather",
+        "description": "Useful to get the weather in a specific location",
+        "signature": "(location: str, degree: str) -> str",
+        "output": "<class 'str'>"
+    }}
+
+    Result: {{
+        "location": "Hawaii",
+        "degree": "Celsius"
+    }}
+
+    Input:
+    query: {query}
+    schema: {function_schema}
+    Result:
+    """
+
+    output = await llm(prompt)
+    if not output:
+        raise Exception("No output generated for extract function input")
+
+    output = output.replace("'", '"').strip().rstrip(",")
+
+    function_inputs = json.loads(output)
+    if not is_valid_inputs(function_inputs, function_schema):
+        raise ValueError("Invalid inputs")
+    return function_inputs
+
+
+def is_valid_inputs(inputs: dict[str, Any], function_schema: dict[str, Any]) -> bool:
+    """Validate the extracted inputs against the parameter names in the schema signature"""
+
+    try:
+        # The schema stores the signature as a string, e.g. "(location: str) -> str",
+        # so recover the parameter names from it and check that each one is present
+        # in the extracted inputs.
+        param_part = function_schema["signature"].split("->")[0].strip().strip("()")
+        param_names = [
+            param.split(":")[0].strip() for param in param_part.split(",") if param.strip()
+        ]
+        for name in param_names:
+            if name not in inputs:
+                logger.error(f"Input {name} missing from query")
+                return False
+        return True
+    except Exception as e:
+        logger.error(f"Input validation error: {str(e)}")
+        return False
+
+
+def call_function(function: Callable, inputs: dict[str, str]):
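+    """Invoke the function with the extracted inputs, logging any argument mismatch."""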
+    try:
+        return function(**inputs)
+    except TypeError as e:
+        logger.error(f"Error calling function: {e}")
+
+
+# TODO: Add route layer object to the input, solve circular import issue
+async def route_and_execute(query: str, functions: list[Callable], route_layer):
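+    """Route the query to a function and execute it with LLM-extracted inputs; fall back to the plain LLM when no route matches."""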
+    function_name = route_layer(query)
+    if not function_name:
+        logger.warning("No function found, calling LLM...")
+        return await llm(query)
+
+    for function in functions:
+        if function.__name__ == function_name:
+            print(f"Calling function: {function.__name__}")
+            schema = get_schema(function)
+            inputs = await extract_function_inputs(query, schema)
+            call_function(function, inputs)
diff --git a/semantic_router/utils/llm.py b/semantic_router/utils/llm.py
new file mode 100644
index 0000000000000000000000000000000000000000..e912ee1f8ea53cdeaa69a83669384a5d6d165c1c
--- /dev/null
+++ b/semantic_router/utils/llm.py
@@ -0,0 +1,36 @@
+import os
+from typing import Optional
+
+import openai
+
+from semantic_router.utils.logger import logger
+
+
+async def llm(prompt: str) -> Optional[str]:
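+    """Call Mistral 7B Instruct via the OpenRouter API and return the completion text."""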
+    try:
+        client = openai.AsyncOpenAI(
+            base_url="https://openrouter.ai/api/v1",
+            api_key=os.getenv("OPENROUTER_API_KEY"),
+        )
+
+        completion = await client.chat.completions.create(
+            model="mistralai/mistral-7b-instruct",
+            messages=[
+                {
+                    "role": "user",
+                    "content": prompt,
+                },
+            ],
+            temperature=0.01,
+            max_tokens=200,
+        )
+
+        output = completion.choices[0].message.content
+
+        if not output:
+            raise Exception("No output generated")
+        return output
+    except Exception as e:
+        logger.error(f"LLM error: {e}")
+        raise Exception(f"LLM error: {e}") from e