diff --git a/docs/02-dynamic-routes.ipynb b/docs/02-dynamic-routes.ipynb
index 23eea22806489c3f489fd8f00fadb9a275868eb9..06c4f737be0526d1dba1ea670bdcc09dc2b9d1dd 100644
--- a/docs/02-dynamic-routes.ipynb
+++ b/docs/02-dynamic-routes.ipynb
@@ -42,7 +42,7 @@
     },
     {
       "cell_type": "code",
-      "execution_count": null,
+      "execution_count": 1,
       "metadata": {
         "id": "dLElfRhgur0v"
       },
@@ -71,11 +71,20 @@
     },
     {
       "cell_type": "code",
-      "execution_count": 1,
+      "execution_count": 2,
       "metadata": {
         "id": "kc9Ty6Lgur0x"
       },
-      "outputs": [],
+      "outputs": [
+        {
+          "name": "stderr",
+          "output_type": "stream",
+          "text": [
+            "c:\\Users\\Siraj\\Documents\\Personal\\Work\\Aurelio\\Virtual Environments\\semantic_router\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+            "  from .autonotebook import tqdm as notebook_tqdm\n"
+          ]
+        }
+      ],
       "source": [
         "from semantic_router import Route\n",
         "\n",
@@ -127,7 +136,7 @@
           "name": "stderr",
           "output_type": "stream",
           "text": [
-            "\u001b[32m2024-01-08 11:12:24 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
+            "\u001b[32m2024-04-27 02:19:43 INFO semantic_router.utils.logger local\u001b[0m\n"
           ]
         }
       ],
@@ -175,7 +184,7 @@
         {
           "data": {
             "text/plain": [
-              "RouteChoice(name='chitchat', function_call=None)"
+              "RouteChoice(name='chitchat', function_call=None, similarity_score=None)"
             ]
           },
           "execution_count": 4,
@@ -245,11 +254,8 @@
       "outputs": [
         {
           "data": {
-            "application/vnd.google.colaboratory.intrinsic+json": {
-              "type": "string"
-            },
             "text/plain": [
-              "'06:13'"
+              "'18:19'"
             ]
           },
           "execution_count": 6,
@@ -330,6 +336,15 @@
         ")"
       ]
     },
+    {
+      "cell_type": "code",
+      "execution_count": 9,
+      "metadata": {},
+      "outputs": [],
+      "source": [
+        "time_route.llm"
+      ]
+    },
     {
       "cell_type": "markdown",
       "metadata": {
@@ -341,7 +356,7 @@
     },
     {
       "cell_type": "code",
-      "execution_count": 9,
+      "execution_count": 10,
       "metadata": {
         "colab": {
           "base_uri": "https://localhost:8080/"
@@ -354,7 +369,7 @@
           "name": "stderr",
           "output_type": "stream",
           "text": [
-            "\u001b[32m2024-01-08 11:15:26 INFO semantic_router.utils.logger Adding `get_time` route\u001b[0m\n"
+            "\u001b[32m2024-04-27 02:19:45 INFO semantic_router.utils.logger Adding `get_time` route\u001b[0m\n"
           ]
         }
       ],
@@ -362,6 +377,15 @@
         "rl.add(time_route)"
       ]
     },
+    {
+      "cell_type": "code",
+      "execution_count": 11,
+      "metadata": {},
+      "outputs": [],
+      "source": [
+        "time_route.llm"
+      ]
+    },
     {
       "cell_type": "markdown",
       "metadata": {
@@ -373,7 +397,7 @@
     },
     {
       "cell_type": "code",
-      "execution_count": 11,
+      "execution_count": 12,
       "metadata": {
         "colab": {
           "base_uri": "https://localhost:8080/",
@@ -387,19 +411,51 @@
           "name": "stderr",
           "output_type": "stream",
           "text": [
-            "\u001b[32m2024-01-08 11:16:24 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+            "\u001b[33m2024-04-27 02:19:45 WARNING semantic_router.utils.logger No LLM provided for dynamic route, will use OpenAI LLM default. Ensure API key is set in OPENAI_API_KEY environment variable.\u001b[0m\n",
+            "\u001b[32m2024-04-27 02:19:45 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+          ]
+        },
+        {
+          "name": "stdout",
+          "output_type": "stream",
+          "text": [
+            "##################################################\n",
+            "tools\n",
+            "None\n",
+            "##################################################\n"
+          ]
+        },
+        {
+          "name": "stderr",
+          "output_type": "stream",
+          "text": [
+            "\u001b[32m2024-04-27 02:19:46 INFO semantic_router.utils.logger LLM output: {\n",
+            "\t\"timezone\": \"America/New_York\"\n",
+            "}\u001b[0m\n",
+            "\u001b[32m2024-04-27 02:19:46 INFO semantic_router.utils.logger Function inputs: {'timezone': 'America/New_York'}\u001b[0m\n"
+          ]
+        },
+        {
+          "name": "stdout",
+          "output_type": "stream",
+          "text": [
+            "##################################################\n",
+            "completion.choices[0].message.tool_calls\n",
+            "None\n",
+            "##################################################\n",
+            "##################################################\n",
+            "extracted_inputs\n",
+            "{'timezone': 'America/New_York'}\n",
+            "##################################################\n"
           ]
         },
         {
           "data": {
-            "application/vnd.google.colaboratory.intrinsic+json": {
-              "type": "string"
-            },
             "text/plain": [
-              "'06:16'"
+              "'18:19'"
             ]
           },
-          "execution_count": 11,
+          "execution_count": 12,
           "metadata": {},
           "output_type": "execute_result"
         }
@@ -409,6 +465,26 @@
         "get_time(**out.function_call)"
       ]
     },
+    {
+      "cell_type": "code",
+      "execution_count": 13,
+      "metadata": {},
+      "outputs": [
+        {
+          "data": {
+            "text/plain": [
+              "OpenAILLM(name='gpt-3.5-turbo', client=<openai.OpenAI object at 0x00000152CAD11ED0>, temperature=0.01, max_tokens=200)"
+            ]
+          },
+          "execution_count": 13,
+          "metadata": {},
+          "output_type": "execute_result"
+        }
+      ],
+      "source": [
+        "time_route.llm"
+      ]
+    },
     {
       "cell_type": "markdown",
       "metadata": {
@@ -447,7 +523,7 @@
       "name": "python",
       "nbconvert_exporter": "python",
       "pygments_lexer": "ipython3",
-      "version": "3.11.5"
+      "version": "3.11.4"
     }
   },
   "nbformat": 4,
diff --git a/docs/10-dynamic-routes-via-openai-function-calling.ipynb b/docs/10-dynamic-routes-via-openai-function-calling.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..2bee09b2fbc0c114593b75fd63462e3cf42e1538
--- /dev/null
+++ b/docs/10-dynamic-routes-via-openai-function-calling.ipynb
@@ -0,0 +1,529 @@
+{
+  "cells": [
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "UxqB7_Ieur0s"
+      },
+      "source": [
+        "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/aurelio-labs/semantic-router/blob/main/docs/10-dynamic-routes-via-openai-function-calling.ipynb) [![Open nbviewer](https://raw.githubusercontent.com/pinecone-io/examples/master/assets/nbviewer-shield.svg)](https://nbviewer.org/github/aurelio-labs/semantic-router/blob/main/docs/10-dynamic-routes-via-openai-function-calling.ipynb)"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "EduhQaNAur0u"
+      },
+      "source": [
+        "# Dynamic Routes via OpenAI Function Calling"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "_4JgNeX4ur0v"
+      },
+      "source": [
+        "In semantic-router there are two types of routes that can be chosen. Both routes belong to the `Route` object, the only difference between them is that _static_ routes return a `Route.name` when chosen, whereas _dynamic_ routes use an LLM call to produce parameter input values.\n",
+        "\n",
+        "For example, a _static_ route will tell us if a query is talking about mathematics by returning the route name (which could be `\"math\"` for example). A _dynamic_ route can generate additional values, so it may decide a query is talking about maths, but it can also generate Python code that we can later execute to answer the user's query, this output may look like `\"math\", \"import math; output = math.sqrt(64)`.\n",
+        "\n",
+        "***⚠️ Note: We have a fully local version of dynamic routes available at [docs/05-local-execution.ipynb](https://github.com/aurelio-labs/semantic-router/blob/main/docs/05-local-execution.ipynb). The local 05 version tends to outperform the OpenAI version we demo in this notebook, so we'd recommend trying [05](https://github.com/aurelio-labs/semantic-router/blob/main/docs/05-local-execution.ipynb)!***"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "bbmw8CO4ur0v"
+      },
+      "source": [
+        "## Installing the Library"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 1,
+      "metadata": {
+        "id": "dLElfRhgur0v"
+      },
+      "outputs": [],
+      "source": [
+        "!pip install -qU semantic-router==0.0.34"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "BixZd6Eour0w"
+      },
+      "source": [
+        "## Initializing Routes and RouteLayer"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "PxnW9qBvur0x"
+      },
+      "source": [
+        "Dynamic routes are treated in the same way as static routes, let's begin by initializing a `RouteLayer` consisting of static routes."
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 2,
+      "metadata": {
+        "id": "kc9Ty6Lgur0x"
+      },
+      "outputs": [
+        {
+          "name": "stderr",
+          "output_type": "stream",
+          "text": [
+            "c:\\Users\\Siraj\\Documents\\Personal\\Work\\Aurelio\\Virtual Environments\\semantic_router\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+            "  from .autonotebook import tqdm as notebook_tqdm\n"
+          ]
+        }
+      ],
+      "source": [
+        "from semantic_router import Route\n",
+        "\n",
+        "politics = Route(\n",
+        "    name=\"politics\",\n",
+        "    utterances=[\n",
+        "        \"isn't politics the best thing ever\",\n",
+        "        \"why don't you tell me about your political opinions\",\n",
+        "        \"don't you just love the president\" \"don't you just hate the president\",\n",
+        "        \"they're going to destroy this country!\",\n",
+        "        \"they will save the country!\",\n",
+        "    ],\n",
+        ")\n",
+        "chitchat = Route(\n",
+        "    name=\"chitchat\",\n",
+        "    utterances=[\n",
+        "        \"how's the weather today?\",\n",
+        "        \"how are things going?\",\n",
+        "        \"lovely weather today\",\n",
+        "        \"the weather is horrendous\",\n",
+        "        \"let's go to the chippy\",\n",
+        "    ],\n",
+        ")\n",
+        "\n",
+        "routes = [politics, chitchat]"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "voWyqmffur0x"
+      },
+      "source": [
+        "We initialize our `RouteLayer` with our `encoder` and `routes`. We can use popular encoder APIs like `CohereEncoder` and `OpenAIEncoder`, or local alternatives like `FastEmbedEncoder`."
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 3,
+      "metadata": {
+        "colab": {
+          "base_uri": "https://localhost:8080/"
+        },
+        "id": "BI9AiDspur0y",
+        "outputId": "27329a54-3f16-44a5-ac20-13a6b26afb97"
+      },
+      "outputs": [
+        {
+          "name": "stderr",
+          "output_type": "stream",
+          "text": [
+            "\u001b[32m2024-04-27 02:23:29 INFO semantic_router.utils.logger local\u001b[0m\n"
+          ]
+        }
+      ],
+      "source": [
+        "import os\n",
+        "from getpass import getpass\n",
+        "from semantic_router import RouteLayer\n",
+        "from semantic_router.encoders import CohereEncoder, OpenAIEncoder\n",
+        "\n",
+        "# dashboard.cohere.ai\n",
+        "# os.environ[\"COHERE_API_KEY\"] = os.getenv(\"COHERE_API_KEY\") or getpass(\n",
+        "#     \"Enter Cohere API Key: \"\n",
+        "# )\n",
+        "# platform.openai.com\n",
+        "os.environ[\"OPENAI_API_KEY\"] = os.getenv(\"OPENAI_API_KEY\") or getpass(\n",
+        "    \"Enter OpenAI API Key: \"\n",
+        ")\n",
+        "\n",
+        "# encoder = CohereEncoder()\n",
+        "encoder = OpenAIEncoder()\n",
+        "\n",
+        "rl = RouteLayer(encoder=encoder, routes=routes)"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "GuLCeIS5ur0y"
+      },
+      "source": [
+        "We run the route layer, which so far contains only static routes:"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 4,
+      "metadata": {
+        "colab": {
+          "base_uri": "https://localhost:8080/"
+        },
+        "id": "_rNREh7gur0y",
+        "outputId": "f3a1dc0b-d760-4efb-b634-d3547011dcb7"
+      },
+      "outputs": [
+        {
+          "data": {
+            "text/plain": [
+              "RouteChoice(name='chitchat', function_call=None, similarity_score=None)"
+            ]
+          },
+          "execution_count": 4,
+          "metadata": {},
+          "output_type": "execute_result"
+        }
+      ],
+      "source": [
+        "rl(\"how's the weather today?\")"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "McbLKO26ur0y"
+      },
+      "source": [
+        "## Creating a Dynamic Route"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "ANAoEjxYur0y"
+      },
+      "source": [
+        "As with static routes, we must create a dynamic route before adding it to our route layer. To make a route dynamic, we need to provide a `function_schema`. The function schema provides instructions on what a function is, so that an LLM can decide how to use it correctly."
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 5,
+      "metadata": {
+        "id": "5jaF1Xa5ur0y"
+      },
+      "outputs": [],
+      "source": [
+        "from datetime import datetime\n",
+        "from zoneinfo import ZoneInfo\n",
+        "\n",
+        "\n",
+        "def get_time(timezone: str) -> str:\n",
+        "    \"\"\"Finds the current time in a specific timezone.\n",
+        "\n",
+        "    :param timezone: The timezone to find the current time in, should\n",
+        "        be a valid timezone from the IANA Time Zone Database like\n",
+        "        \"America/New_York\" or \"Europe/London\". Do NOT put the place\n",
+        "        name itself like \"rome\", or \"new york\", you must provide\n",
+        "        the IANA format.\n",
+        "    :type timezone: str\n",
+        "    :return: The current time in the specified timezone.\"\"\"\n",
+        "    now = datetime.now(ZoneInfo(timezone))\n",
+        "    return now.strftime(\"%H:%M\")"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 6,
+      "metadata": {
+        "colab": {
+          "base_uri": "https://localhost:8080/",
+          "height": 35
+        },
+        "id": "YyFKV8jMur0z",
+        "outputId": "29cf80f4-552c-47bb-fbf9-019f5dfdf00a"
+      },
+      "outputs": [
+        {
+          "data": {
+            "text/plain": [
+              "'18:23'"
+            ]
+          },
+          "execution_count": 6,
+          "metadata": {},
+          "output_type": "execute_result"
+        }
+      ],
+      "source": [
+        "get_time(\"America/New_York\")"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "4qyaRuNXur0z"
+      },
+      "source": [
+        "To get the function schema we can use the `get_schema` function from the `function_call` module."
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 7,
+      "metadata": {
+        "colab": {
+          "base_uri": "https://localhost:8080/"
+        },
+        "id": "tOjuhp5Xur0z",
+        "outputId": "ca88a3ea-d70a-4950-be9a-63fab699de3b"
+      },
+      "outputs": [
+        {
+          "data": {
+            "text/plain": [
+              "{'name': 'get_time',\n",
+              " 'description': 'Finds the current time in a specific timezone.\\n\\n:param timezone: The timezone to find the current time in, should\\n    be a valid timezone from the IANA Time Zone Database like\\n    \"America/New_York\" or \"Europe/London\". Do NOT put the place\\n    name itself like \"rome\", or \"new york\", you must provide\\n    the IANA format.\\n:type timezone: str\\n:return: The current time in the specified timezone.',\n",
+              " 'signature': '(timezone: str) -> str',\n",
+              " 'output': \"<class 'str'>\"}"
+            ]
+          },
+          "execution_count": 7,
+          "metadata": {},
+          "output_type": "execute_result"
+        }
+      ],
+      "source": [
+        "from semantic_router.utils.function_call import get_schema\n",
+        "\n",
+        "schema = get_schema(get_time)\n",
+        "schema"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 8,
+      "metadata": {},
+      "outputs": [
+        {
+          "data": {
+            "text/plain": [
+              "{'type': 'function',\n",
+              " 'function': {'name': 'get_time',\n",
+              "  'description': 'Finds the current time in a specific timezone.\\n\\n:param timezone: The timezone to find the current time in, should\\n    be a valid timezone from the IANA Time Zone Database like\\n    \"America/New_York\" or \"Europe/London\". Do NOT put the place\\n    name itself like \"rome\", or \"new york\", you must provide\\n    the IANA format.\\n:type timezone: str\\n:return: The current time in the specified timezone.',\n",
+              "  'parameters': {'type': 'object',\n",
+              "   'properties': {'timezone': {'type': 'string',\n",
+              "     'description': 'The timezone to find the current time in, should\\n    be a valid timezone from the IANA Time Zone Database like\\n    \"America/New_York\" or \"Europe/London\". Do NOT put the place\\n    name itself like \"rome\", or \"new york\", you must provide\\n    the IANA format.'}},\n",
+              "   'required': ['timezone']}}}"
+            ]
+          },
+          "execution_count": 8,
+          "metadata": {},
+          "output_type": "execute_result"
+        }
+      ],
+      "source": [
+        "from semantic_router.utils.function_call import get_schema_openai_func_calling\n",
+        "\n",
+        "openai_function_schema = get_schema_openai_func_calling(get_time)\n",
+        "openai_function_schema"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "HcF7jGjAur0z"
+      },
+      "source": [
+        "We use this to define our dynamic route:"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 9,
+      "metadata": {
+        "id": "iesBG9P3ur0z"
+      },
+      "outputs": [],
+      "source": [
+        "time_route = Route(\n",
+        "    name=\"get_time\",\n",
+        "    utterances=[\n",
+        "        \"what is the time in new york city?\",\n",
+        "        \"what is the time in london?\",\n",
+        "        \"I live in Rome, what time is it?\",\n",
+        "    ],\n",
+        "    # function_schema=schema,\n",
+        "    openai_function_schema=openai_function_schema,\n",
+        ")"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "ZiUs3ovpur0z"
+      },
+      "source": [
+        "Add the new route to our `layer`:"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 10,
+      "metadata": {
+        "colab": {
+          "base_uri": "https://localhost:8080/"
+        },
+        "id": "-0vY8PRXur0z",
+        "outputId": "db01e14c-eab3-4f93-f4c2-e30f508c8b5d"
+      },
+      "outputs": [
+        {
+          "name": "stderr",
+          "output_type": "stream",
+          "text": [
+            "\u001b[32m2024-04-27 02:23:30 INFO semantic_router.utils.logger Adding `get_time` route\u001b[0m\n"
+          ]
+        }
+      ],
+      "source": [
+        "rl.add(time_route)"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "7yoE0IrNur0z"
+      },
+      "source": [
+        "Now we can ask our layer a time related question to trigger our new dynamic route."
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 11,
+      "metadata": {
+        "colab": {
+          "base_uri": "https://localhost:8080/",
+          "height": 53
+        },
+        "id": "Wfb68M0-ur0z",
+        "outputId": "79923883-2a4d-4744-f8ce-e818cb5f14c3"
+      },
+      "outputs": [
+        {
+          "name": "stderr",
+          "output_type": "stream",
+          "text": [
+            "\u001b[33m2024-04-27 02:23:31 WARNING semantic_router.utils.logger No LLM provided for dynamic route, will use OpenAI LLM default. Ensure API key is set in OPENAI_API_KEY environment variable.\u001b[0m\n"
+          ]
+        },
+        {
+          "name": "stdout",
+          "output_type": "stream",
+          "text": [
+            "##################################################\n",
+            "tools\n",
+            "[{'type': 'function', 'function': {'name': 'get_time', 'description': 'Finds the current time in a specific timezone.\\n\\n:param timezone: The timezone to find the current time in, should\\n    be a valid timezone from the IANA Time Zone Database like\\n    \"America/New_York\" or \"Europe/London\". Do NOT put the place\\n    name itself like \"rome\", or \"new york\", you must provide\\n    the IANA format.\\n:type timezone: str\\n:return: The current time in the specified timezone.', 'parameters': {'type': 'object', 'properties': {'timezone': {'type': 'string', 'description': 'The timezone to find the current time in, should\\n    be a valid timezone from the IANA Time Zone Database like\\n    \"America/New_York\" or \"Europe/London\". Do NOT put the place\\n    name itself like \"rome\", or \"new york\", you must provide\\n    the IANA format.'}}, 'required': ['timezone']}}}]\n",
+            "##################################################\n",
+            "##################################################\n",
+            "completion.choices[0].message.tool_calls\n",
+            "[ChatCompletionMessageToolCall(id='call_1nkq7oE7uqElxy9uBbPwVZcQ', function=Function(arguments='{\"timezone\":\"America/New_York\"}', name='get_time'), type='function')]\n",
+            "##################################################\n",
+            "##################################################\n",
+            "output\n",
+            "[ChatCompletionMessageToolCall(id='call_1nkq7oE7uqElxy9uBbPwVZcQ', function=Function(arguments='{\"timezone\":\"America/New_York\"}', name='get_time'), type='function')]\n",
+            "##################################################\n",
+            "##################################################\n",
+            "function_inputs\n",
+            "{'timezone': 'America/New_York'}\n",
+            "##################################################\n"
+          ]
+        },
+        {
+          "data": {
+            "text/plain": [
+              "RouteChoice(name='get_time', function_call={'timezone': 'America/New_York'}, similarity_score=None)"
+            ]
+          },
+          "execution_count": 11,
+          "metadata": {},
+          "output_type": "execute_result"
+        }
+      ],
+      "source": [
+        "out = rl(\"what is the time in new york city?\")"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": 13,
+      "metadata": {},
+      "outputs": [
+        {
+          "data": {
+            "text/plain": [
+              "'18:23'"
+            ]
+          },
+          "execution_count": 13,
+          "metadata": {},
+          "output_type": "execute_result"
+        }
+      ],
+      "source": [
+        "get_time(**out.function_call)"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "Qt0vkq2Xur00"
+      },
+      "source": [
+        "Our dynamic route provides both the route itself _and_ the input parameters required to use the route."
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "J0oD1dxIur00"
+      },
+      "source": [
+        "---"
+      ]
+    }
+  ],
+  "metadata": {
+    "colab": {
+      "provenance": []
+    },
+    "kernelspec": {
+      "display_name": "decision-layer",
+      "language": "python",
+      "name": "python3"
+    },
+    "language_info": {
+      "codemirror_mode": {
+        "name": "ipython",
+        "version": 3
+      },
+      "file_extension": ".py",
+      "mimetype": "text/x-python",
+      "name": "python",
+      "nbconvert_exporter": "python",
+      "pygments_lexer": "ipython3",
+      "version": "3.11.4"
+    }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 0
+}
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index a138893a88427fc7d7791f085d3508ee67bd6879..30ffee4189cffd5ba9361ca7fb7b96afe184bb1b 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -244,10 +244,15 @@ class RouteLayer:
         passed = self._check_threshold(top_class_scores, route)
 
         if passed and route is not None and not simulate_static:
-            if route.function_schema and text is None:
-                raise ValueError(
-                    "Route has a function schema, but no text was provided."
-                )
+            if text is None:
+                if route.function_schema:
+                    raise ValueError(
+                        "Route has a function schema, but no text was provided."
+                    )
+                if route.openai_function_schema:
+                    raise ValueError(
+                        "Route has an OpenAI function schema, but no text was provided."
+                    )
             if route.function_schema and not isinstance(route.llm, BaseLLM):
                 if not self.llm:
                     logger.warning(
@@ -260,6 +265,22 @@ class RouteLayer:
                     route.llm = self.llm
                 else:
                     route.llm = self.llm
+            if route.openai_function_schema and not isinstance(route.llm, BaseLLM):
+                if not self.llm:
+                    logger.warning(
+                        "No LLM provided for dynamic route, will use OpenAI LLM "
+                        "default. Ensure API key is set in OPENAI_API_KEY environment "
+                        "variable."
+                    )
+
+                    self.llm = OpenAILLM()
+                    route.llm = self.llm
+                else:
+                    if not isinstance(self.llm, OpenAILLM):
+                        raise TypeError(
+                            "LLM must be an instance of OpenAILLM for openai_function_schema."
+                        )
+                    route.llm = self.llm
             return route(text)
         elif passed and route is not None and simulate_static:
             return RouteChoice(
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index 892375945f4064c4b9af1f70e4be3463a4993d0d..500ef8ffc57acdba160d65b4041102dce5f404dd 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -1,5 +1,5 @@
 import os
-from typing import List, Optional
+from typing import Any, List, Optional
 
 import openai
 
@@ -7,7 +7,7 @@ from semantic_router.llms import BaseLLM
 from semantic_router.schema import Message
 from semantic_router.utils.defaults import EncoderDefault
 from semantic_router.utils.logger import logger
-
+import json
 
 class OpenAILLM(BaseLLM):
     client: Optional[openai.OpenAI]
@@ -36,18 +36,47 @@ class OpenAILLM(BaseLLM):
         self.temperature = temperature
         self.max_tokens = max_tokens
 
-    def __call__(self, messages: List[Message]) -> str:
+    def __call__(self, messages: List[Message], function_schema: dict = None) -> str:
         if self.client is None:
             raise ValueError("OpenAI client is not initialized.")
         try:
+            if function_schema:
+                tools = [function_schema] 
+            else:
+                tools = None
+            # DEBUGGING: Start.
+            print('#'*50)
+            print('tools')
+            print(tools)
+            print('#'*50)
+            # DEBUGGING: End.
             completion = self.client.chat.completions.create(
                 model=self.name,
                 messages=[m.to_openai() for m in messages],
                 temperature=self.temperature,
                 max_tokens=self.max_tokens,
+                tools=tools,
             )
 
             output = completion.choices[0].message.content
+            # DEBUGGING: Start.
+            print('#'*50)
+            # print('print(completion.choices[0].message.function_call)')
+            # print(print(completion.choices[0].message.function_call))
+            print('completion.choices[0].message.tool_calls')
+            print(completion.choices[0].message.tool_calls)
+            print('#'*50)
+            # DEBUGGING: End.
+
+            if function_schema:
+                return completion.choices[0].message.tool_calls
+                # tool_calls = completion.choices[0].message.tool_calls
+                # if not tool_calls:
+                #     raise Exception("No tool calls available in the completion response.")
+                # tool_call = tool_calls[0]
+                # arguments_json = tool_call.function.arguments
+                # arguments_dict = json.loads(arguments_json)
+                # return arguments_dict
 
             if not output:
                 raise Exception("No output generated")
@@ -55,3 +84,34 @@ class OpenAILLM(BaseLLM):
         except Exception as e:
             logger.error(f"LLM error: {e}")
             raise Exception(f"LLM error: {e}") from e
+#
+    def extract_function_inputs_openai(self, query: str, function_schema: dict) -> dict:
+        messages = []
+        # TODO: Finish system prompt.
+        system_prompt = "You are an intelligent AI. Given a command or request from the user, call the function to complete the request."
+        messages.append(Message(role="system", content=system_prompt))
+        messages.append(Message(role="user", content=query))
+        output = self(messages=messages, function_schema=function_schema)
+        if not output:
+            raise Exception("No output generated for extract function input")
+        # DEBUGGING: Start.
+        print('#'*50)
+        print('output')
+        print(output)
+        print('#'*50)
+        # DEBUGGING: End.
+        if len(output) != 1:
+            raise ValueError("Invalid output, expected a single tool to be called")
+        tool_call = output[0]
+        arguments_json = tool_call.function.arguments
+        function_inputs = json.loads(arguments_json)
+
+        # DEBUGGING: Start.
+        print('#'*50)
+        print('function_inputs')
+        print(function_inputs)
+        print('#'*50)
+        # DEBUGGING: End.
+
+        return function_inputs
+                
\ No newline at end of file
diff --git a/semantic_router/route.py b/semantic_router/route.py
index 3d46a8b4f4578ce90da8984d60a8a85956341ed3..b0580885de33365188bdcc111b55bdd73ae2e3ee 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -8,6 +8,7 @@ from semantic_router.llms import BaseLLM
 from semantic_router.schema import Message, RouteChoice
 from semantic_router.utils import function_call
 from semantic_router.utils.logger import logger
+from semantic_router.llms import OpenAILLM
 
 try:
     from PIL.Image import Image
@@ -48,6 +49,7 @@ class Route(BaseModel):
     utterances: Union[List[str], List[Union[Any, "Image"]]]
     description: Optional[str] = None
     function_schema: Optional[Dict[str, Any]] = None
+    openai_function_schema: Optional[Dict[str, Any]] = None
     llm: Optional[BaseLLM] = None
     score_threshold: Optional[float] = None
 
@@ -55,7 +57,11 @@ class Route(BaseModel):
         arbitrary_types_allowed = True
 
     def __call__(self, query: Optional[str] = None) -> RouteChoice:
-        if self.function_schema:
+        if self.function_schema and self.openai_function_schema:
+            raise ValueError(
+                "Both function_schema and openai_function_schema cannot be provided. Please provide only one."
+            )
+        if self.function_schema or self.openai_function_schema:
             if not self.llm:
                 raise ValueError(
                     "LLM is required for dynamic routes. Please ensure the `llm` "
@@ -66,10 +72,23 @@ class Route(BaseModel):
                     "Query is required for dynamic routes. Please ensure the `query` "
                     "argument is passed."
                 )
-            # if a function schema is provided we generate the inputs
+        if self.function_schema:
             extracted_inputs = self.llm.extract_function_inputs(
                 query=query, function_schema=self.function_schema
             )
+            # DEBUGGING: Start.
+            print('#'*50)
+            print('extracted_inputs')
+            print(extracted_inputs)
+            print('#'*50)
+            # DEBUGGING: End.
+            func_call = extracted_inputs
+        elif self.openai_function_schema:
+            if not isinstance(self.llm, OpenAILLM):
+                raise TypeError("LLM must be an instance of OpenAILLM for openai_function_schema.")
+            extracted_inputs = self.llm.extract_function_inputs_openai(
+                query=query, function_schema=self.openai_function_schema
+            )
             func_call = extracted_inputs
         else:
             # otherwise we just pass None for the call
diff --git a/semantic_router/utils/function_call.py b/semantic_router/utils/function_call.py
index 1c60aa6b3f3ddbabc86e9fa41e0879c169c97b69..c5e3a355691300e00b1003d1ad2cd4a336cd7aa4 100644
--- a/semantic_router/utils/function_call.py
+++ b/semantic_router/utils/function_call.py
@@ -6,7 +6,7 @@ from pydantic.v1 import BaseModel
 from semantic_router.llms import BaseLLM
 from semantic_router.schema import Message, RouteChoice
 from semantic_router.utils.logger import logger
-
+import re
 
 def get_schema(item: Union[BaseModel, Callable]) -> Dict[str, Any]:
     if isinstance(item, BaseModel):
@@ -39,6 +39,63 @@ def get_schema(item: Union[BaseModel, Callable]) -> Dict[str, Any]:
         }
     return schema
 
+def convert_param_type_to_json_type(param_type: str) -> str:
+    if param_type == "int":
+        return "number"
+    if param_type == "float":
+        return "number"
+    if param_type == "str":
+        return "string"
+    if param_type == "bool":
+        return "boolean"
+    if param_type == "NoneType":
+        return "null"
+    if param_type == "list":
+        return "array"
+    else:
+        return "object"
+
+def get_schema_openai_func_calling(item: Callable) -> Dict[str, Any]:
+    if not callable(item):
+        raise ValueError("Provided item must be a callable function.")
+    
+    docstring = inspect.getdoc(item)
+    signature = inspect.signature(item)
+    
+    schema = {
+        "type": "function",
+        "function": {
+            "name": item.__name__,
+            "description": docstring if docstring else "No description available.",
+            "parameters": {
+                "type": "object",
+                "properties": {},
+                "required": []
+            }
+        }
+    }
+    
+    for param_name, param in signature.parameters.items():
+        param_type = param.annotation.__name__ if param.annotation != inspect.Parameter.empty else "Any"
+        param_description = "No description available."
+        param_required = param.default is inspect.Parameter.empty
+        
+        # Attempt to extract the parameter description from the docstring
+        if docstring:
+            param_doc_regex = re.compile(rf":param {param_name}:(.*?)\n(?=:\w|$)", re.S)
+            match = param_doc_regex.search(docstring)
+            if match:
+                param_description = match.group(1).strip()
+        
+        schema["function"]["parameters"]["properties"][param_name] = {
+            "type": convert_param_type_to_json_type(param_type),
+            "description": param_description
+        }
+        
+        if param_required:
+            schema["function"]["parameters"]["required"].append(param_name)
+    
+    return schema
 
 # TODO: Add route layer object to the input, solve circular import issue
 async def route_and_execute(