diff --git a/semantic_router/indices/base.py b/semantic_router/indices/base.py
index a36435a9225c805141fb6dc00c45ea8d14910f9e..a2c43db4d56cf0926f184ad31395914c83be2415 100644
--- a/semantic_router/indices/base.py
+++ b/semantic_router/indices/base.py
@@ -13,6 +13,8 @@ class BaseIndex(BaseModel):
     # You can define common attributes here if there are any.
     # For example, a placeholder for the index attribute:
     index: Optional[Any] = None
+    # Index type identifier set by the subclass, e.g. "local" or "pinecone".
+    type: str = ""
 
     def add(self, embeds: List[Any]):
         """
diff --git a/semantic_router/indices/local_index.py b/semantic_router/indices/local_index.py
index 9e6aa8a7ff1a77ef9f5c829899c55625caff14e5..8433c004fd175a551914ac1ef2700783c7af9cbe 100644
--- a/semantic_router/indices/local_index.py
+++ b/semantic_router/indices/local_index.py
@@ -6,6 +6,10 @@ from semantic_router.indices.base import BaseIndex
 
 class LocalIndex(BaseIndex):
 
+    def __init__(self, **data):
+        super().__init__(**data)
+        self.type = "local"
+
     class Config:  # Stop pydantic from complaining about  Optional[np.ndarray] type hints.
         arbitrary_types_allowed = True
 
diff --git a/semantic_router/indices/pinecone.py b/semantic_router/indices/pinecone.py
index 2ee905faacf41dd79420de482c11ce77285dcb2b..e95d5761016ee844f35b728d412044c6086eaed2 100644
--- a/semantic_router/indices/pinecone.py
+++ b/semantic_router/indices/pinecone.py
@@ -4,6 +4,7 @@ import pinecone
 from typing import Any, List, Tuple
 from semantic_router.indices.base import BaseIndex
 import numpy as np
+import uuid
 
 class PineconeIndex(BaseIndex):
     index_name: str
@@ -16,7 +17,8 @@ class PineconeIndex(BaseIndex):
 
     def __init__(self, **data):
         super().__init__(**data) 
+        self.type = "pinecone"
         # Initialize Pinecone environment with the new API
         self.pinecone = pinecone.Pinecone(api_key=os.getenv("PINECONE_API_KEY"))
         
         # Create or connect to an existing Pinecone index
@@ -33,17 +35,15 @@ class PineconeIndex(BaseIndex):
             )
         self.index = self.pinecone.Index(self.index_name)
         
-    def add(self, embeds: List[List[float]]):
-        # Format embeds as a list of dictionaries for Pinecone's upsert method
+    def add(self, embeds_with_route_names: List[Tuple[List[float], str]]):
         vectors_to_upsert = []
-        for vector in embeds:
-            self.vector_id_counter += 1  # Increment the counter for each new vector
-            vector_id = str(self.vector_id_counter)  # Convert counter to string ID
-
-            # Prepare for upsert
-            vectors_to_upsert.append({"id": vector_id, "values": vector})
-
-        # Perform the upsert operation
+        for vector, route_name in embeds_with_route_names:
+            vector_id = str(uuid.uuid4())
+            vectors_to_upsert.append({
+                "id": vector_id,
+                "values": vector,
+                "metadata": {"route_name": route_name},
+            })
         self.index.upsert(vectors=vectors_to_upsert)
 
     def remove(self, ids_to_remove: List[str]):
@@ -56,24 +56,16 @@
         stats = self.index.describe_index_stats()
         return stats["dimension"] > 0 and stats["total_vector_count"] > 0
     
-    def query(self, query_vector: np.ndarray, top_k: int = 5) -> Tuple[np.ndarray, np.ndarray]:
+    def query(self, query_vector: np.ndarray, top_k: int = 5) -> Tuple[np.ndarray, List[str]]:
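+        """Return top_k scores and route names read from match metadata."""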
         query_vector_list = query_vector.tolist()
-        results = self.index.query(vector=[query_vector_list], top_k=top_k)
-        ids = [int(result["id"]) for result in results["matches"]]
+        results = self.index.query(
+            vector=[query_vector_list],
+            top_k=top_k,
+            include_metadata=True)
         scores = [result["score"] for result in results["matches"]]
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('ids')
-        print(ids)
-        print('#'*50)
-        # DEBUGGING: End.
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('scores')
-        print(scores)
-        print('#'*50)
-        # DEBUGGING: End.
-        return np.array(scores), np.array(ids)
+        route_names = [result["metadata"]["route_name"] for result in results["matches"]]
+        return np.array(scores), route_names
 
     def delete_index(self):
         pinecone.delete_index(self.index_name)
\ No newline at end of file
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index b5fb8193c3b83f87daa3e50e6dd97365cca1f846..8634af07f84c6c36e282bc47bd3e10831daeaf0b 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -188,9 +188,6 @@ class RouteLayer:
             self._add_routes(routes=self.routes)
 
     def check_for_matching_routes(self, top_class: str) -> Optional[Route]:
-        # DEBUGGING: Start.
-        print(f'top_class 2: {top_class}')
-        # DEBUGGING: End.
         matching_routes = [route for route in self.routes if route.name == top_class]
         if not matching_routes:
             logger.error(
@@ -213,17 +210,8 @@ class RouteLayer:
             vector_arr = np.array(vector)
         # get relevant utterances
         results = self._retrieve(xq=vector_arr)
-        # DEBUGGING: Start.
-        print(f'results: {results}')
-        # DEBUGGING: End.
         # decide most relevant routes
         top_class, top_class_scores = self._semantic_classify(results)
-        # DEBUGGING: Start.
-        print(f'top_class 1: {top_class}')
-        # DEBUGGING: End.
-        # DEBUGGING: Start.
-        print(f'top_class_scores: {top_class_scores}')
-        # DEBUGGING: End.
         # TODO do we need this check?
         route = self.check_for_matching_routes(top_class)
         if route is None:
@@ -233,24 +221,6 @@ class RouteLayer:
             if route.score_threshold is not None
             else self.score_threshold
         )
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('Chosen route')
-        print(route)
-        print('#'*50)
-        # DEBUGGING: End.
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('top_class_scores')
-        print(top_class_scores)
-        print('#'*50)
-        # DEBUGGING: End.
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('threshold')
-        print(threshold)
-        print('#'*50)
-        # DEBUGGING: End.
         passed = self._pass_threshold(top_class_scores, threshold)
         if passed:
             if route.function_schema and text is None:
@@ -306,14 +276,21 @@ class RouteLayer:
         if route.score_threshold is None:
             route.score_threshold = self.score_threshold
 
-        # create route array
-        if self.categories is None:
-            self.categories = np.array([route.name] * len(embeds))
-        else:
-            str_arr = np.array([route.name] * len(embeds))
-            self.categories = np.concatenate([self.categories, str_arr])
-        # create utterance array (the index)
-        self.index.add(embeds)
+        # Add embeddings to the index; the approach depends on the index type.
+        if self.index.type == "local":
+            # create route array
+            if self.categories is None:
+                self.categories = np.array([route.name] * len(embeds))
+            else:
+                str_arr = np.array([route.name] * len(embeds))
+                self.categories = np.concatenate([self.categories, str_arr])
+            self.index.add(embeds)
+        elif self.index.type == "pinecone":
+            vectors_to_upsert = []
+            for embed in embeds:
+                vectors_to_upsert.append((embed, route.name))
+            self.index.add(vectors_to_upsert)
+
         # add route to routes list
         self.routes.append(route)
 
@@ -340,20 +317,25 @@
 
     def _add_routes(self, routes: List[Route]):
         # create embeddings for all routes
-        all_utterances = [
-            utterance for route in routes for utterance in route.utterances
-        ]
+        all_utterances = [utterance for route in routes for utterance in route.utterances]
         embedded_utterances = self.encoder(all_utterances)
 
         # create route array
         route_names = [route.name for route in routes for _ in route.utterances]
-        route_array = np.array(route_names)
-        self.categories = (
-            np.concatenate([self.categories, route_array])
-            if self.categories is not None
-            else route_array
-        )
-        self.index.add(embedded_utterances)
+
+        if self.index.type == "local":
+            # Local index needs self.categories to map results back to route names
+            route_array = np.array(route_names)
+            self.categories = (
+                np.concatenate([self.categories, route_array])
+                if self.categories is not None
+                else route_array
+            )
+            self.index.add(embedded_utterances)
+        elif self.index.type == "pinecone":
+            # For Pinecone, prepare a list of 2-tuples with embeddings and route names
+            vectors_to_upsert = list(zip(embedded_utterances, route_names))
+            self.index.add(vectors_to_upsert)
 
     def _encode(self, text: str) -> Any:
         """Given some text, encode it."""
@@ -364,21 +346,14 @@
 
     def _retrieve(self, xq: Any, top_k: int = 5) -> List[dict]:
         """Given a query vector, retrieve the top_k most similar records."""
-        # DEBUGGING: Start.
-        print('#'*50)
-        print('RouteLayer._retrieve - CHECKPOINT 1')
-        print('#'*50)
-        # DEBUGGING: End.
         if self.index.is_index_populated():
-            # DEBUGGING: Start.
-            print('#'*50)
-            print('RouteLayer._retrieve - CHECKPOINT 2')
-            print('#'*50)
-            # DEBUGGING: End.
             # calculate similarity matrix
-            scores, idx = self.index.query(xq, top_k)
-            # get the utterance categories (route names)
-            routes = self.categories[idx] if self.categories is not None else []
+            if self.index.type == "local":
+                scores, idx = self.index.query(xq, top_k)
+                # get the utterance categories (route names)
+                routes = self.categories[idx] if self.categories is not None else []
+            elif self.index.type == "pinecone":
+                scores, routes = self.index.query(xq, top_k)
             return [{"route": d, "score": s.item()} for d, s in zip(routes, scores)]
         else:
             logger.warning("No index found for route layer.")