diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 5d56aefb96f86881838b960c9c80f02b28f40f29..1e8a813066960f2428dc04df4b4780112c21847d 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -6,6 +6,7 @@ from typing import Any, Dict, List, Optional, Tuple, Union
 import numpy as np
 import yaml
 from tqdm.auto import tqdm
+import importlib
 
 from semantic_router.encoders import BaseEncoder, OpenAIEncoder
 from semantic_router.llms import BaseLLM, OpenAILLM
@@ -78,7 +79,7 @@ class LayerConfig:
 
     @classmethod
     def from_file(cls, path: str) -> "LayerConfig":
-        """Load the routes from a file in JSON or YAML format"""
         logger.info(f"Loading route config from {path}")
         _, ext = os.path.splitext(path)
         with open(path, "r") as f:
@@ -91,17 +92,30 @@
                     "Unsupported file type. Only .json and .yaml are supported"
                 )
 
-            route_config_str = json.dumps(layer)
-            if is_valid(route_config_str):
-                encoder_type = layer["encoder_type"]
-                encoder_name = layer["encoder_name"]
-                routes = [Route.from_dict(route) for route in layer["routes"]]
-                return cls(
-                    encoder_type=encoder_type, encoder_name=encoder_name, routes=routes
-                )
-            else:
+            if not is_valid(json.dumps(layer)):
                 raise Exception("Invalid config JSON or YAML")
 
+            encoder_type = layer["encoder_type"]
+            encoder_name = layer["encoder_name"]
+            routes = []
+            for route_data in layer["routes"]:
+                # Rebuild the route's LLM from its serialized form, if present
+                if "llm" in route_data and route_data["llm"] is not None:
+                    llm_data = route_data.pop("llm")
+                    # Import the module named in the config, then fetch the class
+                    llm_module = importlib.import_module(llm_data["module"])
+                    llm_class = getattr(llm_module, llm_data["class"])
+                    # Instantiate the LLM with the serialized model name
+                    route_data["llm"] = llm_class(name=llm_data["model"])
+
+                # Create the Route from the remaining fields
+                route = Route(**route_data)
+                routes.append(route)
+
+            return cls(
+                encoder_type=encoder_type, encoder_name=encoder_name, routes=routes
+            )
+
     def to_dict(self) -> Dict[str, Any]:
         return {
             "encoder_type": self.encoder_type,
diff --git a/semantic_router/route.py b/semantic_router/route.py
index 1fe9983d8b4bd6bed120ca0582610e446b19bfc2..a1e0e9b0d23e622484792c90ab0fee1064ac01d9 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -68,8 +68,16 @@ class Route(BaseModel):
             func_call = None
         return RouteChoice(name=self.name, function_call=func_call)
 
     def to_dict(self) -> Dict[str, Any]:
-        return self.dict()
+        data = self.dict()
+        if self.llm is not None:
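+            # Store an importable module/class/model reference, not the LLM object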
+            data["llm"] = {
+                "module": self.llm.__module__,
+                "class": self.llm.__class__.__name__,
+                "model": self.llm.name,
+            }
+        return data
 
     @classmethod
     def from_dict(cls, data: Dict[str, Any]):
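
On the serialization side, Route.to_dict now stores the LLM as an importable
reference rather than a nested object dump, which is what LayerConfig.from_file
relies on above. A minimal round-trip sketch, assuming only the public Route
and BaseLLM classes:

    from semantic_router.llms.base import BaseLLM
    from semantic_router.route import Route

    route = Route(
        name="llm_route",
        utterances=["tell me a joke", "say something funny"],
        llm=BaseLLM(name="fake-model-v1"),
    )
    print(route.to_dict()["llm"])
    # {'module': 'semantic_router.llms.base',
    #  'class': 'BaseLLM', 'model': 'fake-model-v1'}
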
diff --git a/tests/unit/test_layer.py b/tests/unit/test_layer.py
index 03c0c1eaba540ec6f0f9ae0dc93ab38e41a3265d..88a4679a8dc71c016817472c9a504ff1ab56dac8 100644
--- a/tests/unit/test_layer.py
+++ b/tests/unit/test_layer.py
@@ -7,6 +7,7 @@ import pytest
 from semantic_router.encoders import BaseEncoder, CohereEncoder, OpenAIEncoder
 from semantic_router.layer import LayerConfig, RouteLayer
+from semantic_router.llms.base import BaseLLM
 from semantic_router.route import Route
 
 
 def mock_encoder_call(utterances):
@@ -268,30 +269,139 @@ class TestRouteLayer:
         assert route_layer.score_threshold == 0.5
 
     def test_json(self, openai_encoder, routes):
-        with tempfile.NamedTemporaryFile(suffix=".yaml", delete=False) as temp:
+        temp = tempfile.NamedTemporaryFile(suffix=".json", delete=False)
+        try:
+            temp_path = temp.name  # Save the temporary file's path
+            temp.close()  # Close the file to ensure it can be opened again on Windows
             os.environ["OPENAI_API_KEY"] = "test_api_key"
             route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
-            route_layer.to_json(temp.name)
-            assert os.path.exists(temp.name)
-            route_layer_from_file = RouteLayer.from_json(temp.name)
+            route_layer.to_json(temp_path)
+            assert os.path.exists(temp_path)
+            route_layer_from_file = RouteLayer.from_json(temp_path)
             assert (
                 route_layer_from_file.index is not None
                 and route_layer_from_file._get_route_names() is not None
             )
-            os.remove(temp.name)
+        finally:
+            os.remove(temp_path)  # Ensure the file is deleted even if the test fails
 
     def test_yaml(self, openai_encoder, routes):
-        with tempfile.NamedTemporaryFile(suffix=".yaml", delete=False) as temp:
+        temp = tempfile.NamedTemporaryFile(suffix=".yaml", delete=False)
+        try:
+            temp_path = temp.name  # Save the temporary file's path
+            temp.close()  # Close the file to ensure it can be opened again on Windows
             os.environ["OPENAI_API_KEY"] = "test_api_key"
             route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
-            route_layer.to_yaml(temp.name)
-            assert os.path.exists(temp.name)
-            route_layer_from_file = RouteLayer.from_yaml(temp.name)
+            route_layer.to_yaml(temp_path)
+            assert os.path.exists(temp_path)
+            route_layer_from_file = RouteLayer.from_yaml(temp_path)
             assert (
                 route_layer_from_file.index is not None
                 and route_layer_from_file._get_route_names() is not None
             )
-            os.remove(temp.name)
+        finally:
+            os.remove(temp_path)  # Ensure the file is deleted even if the test fails
+
+    def test_from_file_json(self, tmp_path):
+        # Create a temporary JSON file with layer configuration
+        config_path = tmp_path / "config.json"
+        config_path.write_text(layer_json())  # layer_json() supplies a valid config
+
+        # Load the LayerConfig from the temporary file
+        layer_config = LayerConfig.from_file(str(config_path))
+
+        # Assertions to verify the loaded configuration
+        assert layer_config.encoder_type == "cohere"
+        assert layer_config.encoder_name == "embed-english-v3.0"
+        assert len(layer_config.routes) == 2
+        assert layer_config.routes[0].name == "politics"
+
+    def test_from_file_yaml(self, tmp_path):
+        # Create a temporary YAML file with layer configuration
+        config_path = tmp_path / "config.yaml"
+        config_path.write_text(layer_yaml())  # layer_yaml() supplies a valid config
+
+        # Load the LayerConfig from the temporary file
+        layer_config = LayerConfig.from_file(str(config_path))
+
+        # Assertions to verify the loaded configuration
+        assert layer_config.encoder_type == "cohere"
+        assert layer_config.encoder_name == "embed-english-v3.0"
+        assert len(layer_config.routes) == 2
+        assert layer_config.routes[0].name == "politics"
+
+    def test_from_file_invalid_path(self):
+        with pytest.raises(FileNotFoundError) as excinfo:
+            LayerConfig.from_file("nonexistent_path.json")
+        assert "[Errno 2] No such file or directory: 'nonexistent_path.json'" in str(
+            excinfo.value
+        )
+
+    def test_from_file_unsupported_type(self, tmp_path):
+        # Create a temporary unsupported file
+        config_path = tmp_path / "config.unsupported"
+        config_path.write_text(layer_json())
+
+        with pytest.raises(ValueError) as excinfo:
+            LayerConfig.from_file(str(config_path))
+        assert "Unsupported file type" in str(excinfo.value)
+
+    def test_from_file_invalid_config(self, tmp_path):
+        # Define an invalid configuration JSON
+        invalid_config_json = """
+        {
+            "encoder_type": "cohere",
+            "encoder_name": "embed-english-v3.0",
+            "routes": "This should be a list, not a string"
+        }"""
+
+        # Write the invalid configuration to a temporary JSON file
+        config_path = tmp_path / "invalid_config.json"
+        with open(config_path, "w") as file:
+            file.write(invalid_config_json)
+
+        # Patch the is_valid function to return False for this test
+        with patch("semantic_router.layer.is_valid", return_value=False):
+            # Attempt to load the LayerConfig from the temporary file
+            # and assert that it raises an exception due to invalid configuration
+            with pytest.raises(Exception) as excinfo:
+                LayerConfig.from_file(str(config_path))
+            assert "Invalid config JSON or YAML" in str(
+                excinfo.value
+            ), "Loading an invalid configuration should raise an exception."
+
+    def test_from_file_with_llm(self, tmp_path):
+        llm_config_json = """
+        {
+            "encoder_type": "cohere",
+            "encoder_name": "embed-english-v3.0",
+            "routes": [
+                {
+                    "name": "llm_route",
+                    "utterances": ["tell me a joke", "say something funny"],
+                    "llm": {
+                        "module": "semantic_router.llms.base",
+                        "class": "BaseLLM",
+                        "model": "fake-model-v1"
+                    }
+                }
+            ]
+        }"""
+
+        config_path = tmp_path / "config_with_llm.json"
+        with open(config_path, "w") as file:
+            file.write(llm_config_json)
+
+        # Load the LayerConfig from the temporary file
+        layer_config = LayerConfig.from_file(str(config_path))
+
+        # Use BaseLLM directly; a mocked LLM would not pass Route's pydantic validation.
+        assert isinstance(
+            layer_config.routes[0].llm, BaseLLM
+        ), "LLM should be instantiated and associated with the route based on the config"
+        assert (
+            layer_config.routes[0].llm.name == "fake-model-v1"
+        ), "LLM instance should have the 'name' attribute set correctly"
 
     def test_config(self, openai_encoder, routes):
         os.environ["OPENAI_API_KEY"] = "test_api_key"