diff --git a/semantic_router/llms/openrouter.py b/semantic_router/llms/openrouter.py
index 608834dd8679cba506e84665f559df9a22078619..0376236486c06f32775730bb332f13c5f87b0ab8 100644
--- a/semantic_router/llms/openrouter.py
+++ b/semantic_router/llms/openrouter.py
@@ -1,6 +1,8 @@
 import os
 from typing import List, Optional
 
+from pydantic import PrivateAttr
+
 import openai
 
 from semantic_router.llms import BaseLLM
@@ -9,8 +11,8 @@ from semantic_router.utils.logger import logger
 
 
 class OpenRouterLLM(BaseLLM):
-    client: Optional[openai.OpenAI]
-    base_url: Optional[str]
+    _client: Optional[openai.OpenAI] = PrivateAttr(default=None)
+    _base_url: str = PrivateAttr(default="https://openrouter.ai/api/v1")
 
     def __init__(
         self,
@@ -25,12 +27,12 @@ class OpenRouterLLM(BaseLLM):
                 "OPENROUTER_CHAT_MODEL_NAME", "mistralai/mistral-7b-instruct"
             )
         super().__init__(name=name)
-        self.base_url = base_url
+        self._base_url = base_url
         api_key = openrouter_api_key or os.getenv("OPENROUTER_API_KEY")
         if api_key is None:
             raise ValueError("OpenRouter API key cannot be 'None'.")
         try:
-            self.client = openai.OpenAI(api_key=api_key, base_url=self.base_url)
+            self._client = openai.OpenAI(api_key=api_key, base_url=self._base_url)
         except Exception as e:
             raise ValueError(
                 f"OpenRouter API client failed to initialize. Error: {e}"
@@ -39,10 +41,10 @@ class OpenRouterLLM(BaseLLM):
         self.max_tokens = max_tokens
 
     def __call__(self, messages: List[Message]) -> str:
-        if self.client is None:
+        if self._client is None:
             raise ValueError("OpenRouter client is not initialized.")
         try:
-            completion = self.client.chat.completions.create(
+            completion = self._client.chat.completions.create(
                 model=self.name,
                 messages=[m.to_openai() for m in messages],
                 temperature=self.temperature,
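
Note on the change above: a minimal sketch of how pydantic private attributes behave, assuming BaseLLM is a pydantic BaseModel as the replaced field-style declarations suggest. The Example class and the object() stand-in below are illustrative only and are not part of this patch.

from typing import Optional

from pydantic import BaseModel, PrivateAttr


class Example(BaseModel):
    name: str  # regular field: validated and included in serialization

    # A PrivateAttr is skipped by validation and serialization, so it can hold
    # objects pydantic cannot model as fields, such as an API client handle.
    _client: Optional[object] = PrivateAttr(default=None)

    def __init__(self, name: str, **data):
        super().__init__(name=name, **data)
        # Private attrs are assigned normally after super().__init__().
        self._client = object()  # stand-in for openai.OpenAI(...)


m = Example(name="x")
assert m._client is not None            # accessible like a normal attribute
assert "_client" not in m.model_dump()  # excluded from output (pydantic v2 API; .dict() on v1)

This is why the tests below reach the client as openrouter_llm._client rather than through a declared field.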
diff --git a/tests/unit/encoders/test_fastembed.py b/tests/unit/encoders/test_fastembed.py
index 35c05111e9ed37e1982fce3a0ef090169a6d4351..f46303c1bb069653c792fcf00c05ee9ced112923 100644
--- a/tests/unit/encoders/test_fastembed.py
+++ b/tests/unit/encoders/test_fastembed.py
@@ -4,6 +4,7 @@ import pytest
 
 _ = pytest.importorskip("fastembed")
 
+
 class TestFastEmbedEncoder:
     def test_fastembed_encoder(self):
         encode = FastEmbedEncoder()
diff --git a/tests/unit/llms/test_llm_openrouter.py b/tests/unit/llms/test_llm_openrouter.py
index 9b1ee150f2b301984c24eeb144453cd6a5ea0973..71e874f10124748c5e32e869807dcec2fcb0960d 100644
--- a/tests/unit/llms/test_llm_openrouter.py
+++ b/tests/unit/llms/test_llm_openrouter.py
@@ -12,7 +12,7 @@ def openrouter_llm(mocker):
 
 class TestOpenRouterLLM:
     def test_openrouter_llm_init_with_api_key(self, openrouter_llm):
-        assert openrouter_llm.client is not None, "Client should be initialized"
+        assert openrouter_llm._client is not None, "Client should be initialized"
         assert (
             openrouter_llm.name == "mistralai/mistral-7b-instruct"
         ), "Default name not set correctly"
@@ -20,7 +20,7 @@ class TestOpenRouterLLM:
     def test_openrouter_llm_init_success(self, mocker):
         mocker.patch("os.getenv", return_value="fake-api-key")
         llm = OpenRouterLLM()
-        assert llm.client is not None
+        assert llm._client is not None
 
     def test_openrouter_llm_init_without_api_key(self, mocker):
         mocker.patch("os.getenv", return_value=None)
@@ -29,7 +29,7 @@ class TestOpenRouterLLM:
 
     def test_openrouter_llm_call_uninitialized_client(self, openrouter_llm):
         # Set the client to None to simulate an uninitialized client
-        openrouter_llm.client = None
+        openrouter_llm._client = None
         with pytest.raises(ValueError) as e:
             llm_input = [Message(role="user", content="test")]
             openrouter_llm(llm_input)
@@ -51,7 +51,7 @@ class TestOpenRouterLLM:
 
         mocker.patch("os.getenv", return_value="fake-api-key")
         mocker.patch.object(
-            openrouter_llm.client.chat.completions,
+            openrouter_llm._client.chat.completions,
             "create",
             return_value=mock_completion,
         )