From b7721f5088b0d6fddd7f4ae89b193c4cc7737e9a Mon Sep 17 00:00:00 2001
From: Siraj R Aizlewood <siraj@aurelio.ai>
Date: Tue, 13 Feb 2024 23:41:11 +0400
Subject: [PATCH] Fix for the case when a route's 'llm' entry in the config dict is None.

---
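The old check only tested for the presence of the "llm" key, so a route
serialized with an explicit "llm": None (e.g. a JSON null for a route
that has no LLM attached) had None popped and passed on to the LLM
reconstruction step. Guarding on the value as well lets such routes
load cleanly.

Minimal sketch of the guarded pattern, assuming the config shape
implied by the hunk below; field names other than "llm" and the
utterance strings are illustrative, not the library's full schema:

    # A route entry as it might appear in a loaded config dict.
    route_data = {
        "name": "chitchat",                   # illustrative route name
        "utterances": ["hi", "how are you"],  # illustrative utterances
        "llm": None,                          # route saved without an LLM
    }

    # Old guard: `if "llm" in route_data:` let None through.
    # New guard: skip LLM handling when the value is explicitly None.
    if "llm" in route_data and route_data["llm"] is not None:
        llm_data = route_data.pop("llm")  # only reached for a real LLM dict
        ...  # rebuild the LLM from llm_data, as in layer.py
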
 semantic_router/layer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 153d0607..46abe83d 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -98,7 +98,7 @@ class LayerConfig:
             routes = []
             for route_data in layer["routes"]:
                 # Handle the 'llm' field specially if it exists
-                if "llm" in route_data:
+                if "llm" in route_data and route_data["llm"] is not None:
                     llm_data = route_data.pop(
                         "llm"
                     )  # Remove 'llm' from route_data and handle it separately
-- 
GitLab