diff --git a/semantic_router/route.py b/semantic_router/route.py
index 6cca7eaf7aae943ab7adf97c1369e7b943f1a655..2289825071a39652aeaa467395103d71dc623140 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -96,29 +96,29 @@ class Route(BaseModel):
         logger.info("Generating dynamic route...")
 
         prompt = f"""
-        You are tasked to generate a JSON configuration based on the provided
-        function schema. Please follow the template below, no other tokens allowed:
-
-        <config>
-        {{
-            "name": "<function_name>",
-            "utterances": [
-                "<example_utterance_1>",
-                "<example_utterance_2>",
-                "<example_utterance_3>",
-                "<example_utterance_4>",
-                "<example_utterance_5>"]
-        }}
-        </config>
-
-        Only include the "name" and "utterances" keys in your answer.
-        The "name" should match the function name and the "utterances"
-        should comprise a list of 5 example phrases that could be used to invoke
-        the function. Use real values instead of placeholders.
-
-        Input schema:
-        {function_schema}
-        """
+You are tasked to generate a JSON configuration based on the provided
+function schema. Please follow the template below, no other tokens allowed:
+
+<config>
+{{
+    "name": "<function_name>",
+    "utterances": [
+        "<example_utterance_1>",
+        "<example_utterance_2>",
+        "<example_utterance_3>",
+        "<example_utterance_4>",
+        "<example_utterance_5>"]
+}}
+</config>
+
+Only include the "name" and "utterances" keys in your answer.
+The "name" should match the function name and the "utterances"
+should comprise a list of 5 example phrases that could be used to invoke
+the function. Use real values instead of placeholders.
+
+Input schema:
+{function_schema}
+"""
 
         llm_input = [Message(role="user", content=prompt)]
         output = llm(llm_input)
diff --git a/semantic_router/utils/function_call.py b/semantic_router/utils/function_call.py
index cedd9b6ecd86131b630cf6d4921848604dc88fa0..3c8b3277b3b4cdd8d3c2ebc1849ff3da4cbd1ca7 100644
--- a/semantic_router/utils/function_call.py
+++ b/semantic_router/utils/function_call.py
@@ -47,33 +47,36 @@ def extract_function_inputs(
     logger.info("Extracting function input...")
 
     prompt = f"""
-    You are a helpful assistant designed to output JSON.
-    Given the following function schema
-    << {function_schema} >>
-    and query
-    << {query} >>
-    extract the parameters values from the query, in a valid JSON format.
-    Example:
-    Input:
-    query: "How is the weather in Hawaii right now in International units?"
-    schema:
-    {{
-        "name": "get_weather",
-        "description": "Useful to get the weather in a specific location",
-        "signature": "(location: str, degree: str) -> str",
-        "output": "<class 'str'>",
-    }}
-
-    Result: {{
-        "location": "London",
-        "degree": "Celsius",
-    }}
-
-    Input:
-    query: {query}
-    schema: {function_schema}
-    Result:
-    """
+You are a helpful assistant designed to output JSON.
+Given the following function schema
+<< {function_schema} >>
+and query
+<< {query} >>
+extract the parameters values from the query, in a valid JSON format.
+Example:
+Input:
+query: "How is the weather in Hawaii right now in International units?"
+schema:
+{{
+    "name": "get_weather",
+    "description": "Useful to get the weather in a specific location",
+    "signature": "(location: str, degree: str) -> float",
+    "output": "<class 'float'>"
+}}
+
+Result:
+{{
+    "location": "Hawaii",
+    "degree": "Kelvin"
+}}
+
+Input:
+query: "{query}"
+schema:
+{json.dumps(function_schema, indent=4)}
+
+Result:
+"""
     llm_input = [Message(role="user", content=prompt)]
     output = llm(llm_input)
     if not output: