diff --git a/llama-index-core/llama_index/core/instrumentation/events/base.py b/llama-index-core/llama_index/core/instrumentation/events/base.py
index 4c7636004fe8575adb18225a20221000fda9348b..061aea2605a4dd2497095f74a143c7b93a0fd3db 100644
--- a/llama-index-core/llama_index/core/instrumentation/events/base.py
+++ b/llama-index-core/llama_index/core/instrumentation/events/base.py
@@ -15,6 +15,7 @@ class BaseEvent(BaseModel):
 
     class Config:
         arbitrary_types_allowed = True
+        copy_on_model_validation = "deep"
 
     def dict(self, **kwargs: Any) -> Dict[str, Any]:
         data = super().dict(**kwargs)
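
A note on the `copy_on_model_validation = "deep"` setting added above: under pydantic v1 (which `llama_index.core.bridge.pydantic` wraps), this makes an event instance deep-copied whenever it is validated as a field of another model, so mutating the copy cannot leak back into the original event. A minimal sketch of that behaviour, assuming pydantic v1 semantics and using hypothetical `Inner`/`Outer` models rather than `BaseEvent` itself:

```python
from pydantic import BaseModel  # pydantic v1 assumed, matching the bridge used here


class Inner(BaseModel):
    class Config:
        # Same setting as the BaseEvent patch: deep-copy on validation.
        copy_on_model_validation = "deep"

    values: list


class Outer(BaseModel):
    inner: Inner


event = Inner(values=[1, 2])
wrapper = Outer(inner=event)     # validation deep-copies `event`
wrapper.inner.values.append(3)   # mutate only the copy held by `wrapper`
assert event.values == [1, 2]    # the original instance is untouched
```

With the default (shallow) behaviour the nested list would be shared between `event` and `wrapper.inner`, so the append would be visible on both.
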
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-voyageai/llama_index/embeddings/voyageai/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-voyageai/llama_index/embeddings/voyageai/base.py
index ac9e92797fdd7456e1059ed93c209913fa011b6e..efcc8a677fd1247e110d31ccfa926667c749c469 100644
--- a/llama-index-integrations/embeddings/llama-index-embeddings-voyageai/llama_index/embeddings/voyageai/base.py
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-voyageai/llama_index/embeddings/voyageai/base.py
@@ -3,11 +3,11 @@
 import logging
 from typing import Any, List, Optional
 
+from llama_index.core.bridge.pydantic import PrivateAttr
 from llama_index.core.base.embeddings.base import BaseEmbedding
 from llama_index.core.callbacks.base import CallbackManager
 
 import voyageai
-from pydantic import PrivateAttr
 
 logger = logging.getLogger(__name__)
 
@@ -23,8 +23,8 @@ class VoyageEmbedding(BaseEmbedding):
             You can either specify the key here or store it as an environment variable.
     """
 
-    client: voyageai.Client = PrivateAttr(None)
-    aclient: voyageai.client_async.AsyncClient = PrivateAttr()
+    _client: voyageai.Client = PrivateAttr(None)
+    _aclient: voyageai.client_async.AsyncClient = PrivateAttr()
     truncation: Optional[bool] = None
 
     def __init__(
@@ -53,8 +53,8 @@ class VoyageEmbedding(BaseEmbedding):
             **kwargs,
         )
 
-        self.client = voyageai.Client(api_key=voyage_api_key)
-        self.aclient = voyageai.AsyncClient(api_key=voyage_api_key)
+        self._client = voyageai.Client(api_key=voyage_api_key)
+        self._aclient = voyageai.AsyncClient(api_key=voyage_api_key)
         self.truncation = truncation
 
     @classmethod
@@ -62,7 +62,7 @@ class VoyageEmbedding(BaseEmbedding):
         return "VoyageEmbedding"
 
     def _get_embedding(self, texts: List[str], input_type: str) -> List[List[float]]:
-        return self.client.embed(
+        return self._client.embed(
             texts,
             model=self.model_name,
             input_type=input_type,
@@ -72,7 +72,7 @@ class VoyageEmbedding(BaseEmbedding):
     async def _aget_embedding(
         self, texts: List[str], input_type: str
     ) -> List[List[float]]:
-        r = await self.aclient.embed(
+        r = await self._aclient.embed(
             texts,
             model=self.model_name,
             input_type=input_type,
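
For context on the rename above: pydantic v1 only accepts a `PrivateAttr` default on sunder/dunder names, and private attributes are excluded from validation and serialization, which is what you want for raw SDK client objects. That is why the fields become `_client`/`_aclient` and the import switches to the core bridge, so the `PrivateAttr` class comes from the same pydantic version the model is built on. A rough, self-contained sketch of the pattern, using a stand-in client class rather than the real voyageai SDK:

```python
from typing import Any

from pydantic import BaseModel, PrivateAttr  # pydantic v1 assumed


class FakeClient:
    """Stand-in for voyageai.Client; purely illustrative."""

    def __init__(self, api_key: str) -> None:
        self.api_key = api_key


class Embedding(BaseModel):
    model_name: str = "example-model"

    # Private attrs must use underscore-prefixed names; pydantic skips them
    # during validation and leaves them out of .dict()/.json().
    _client: Any = PrivateAttr()

    def __init__(self, api_key: str, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        # Private attributes are assigned after BaseModel.__init__,
        # mirroring the VoyageEmbedding constructor in the diff.
        self._client = FakeClient(api_key=api_key)


emb = Embedding(api_key="sk-test")
print(emb._client.api_key)  # usable on the instance
print(emb.dict())           # _client does not appear in the serialized output
```
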
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-voyageai/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-voyageai/pyproject.toml
index ebad2fa805168a3f2e992118112137195398efb3..27e7d784e396ffb448958ae5ba2fcd919a126f6d 100644
--- a/llama-index-integrations/embeddings/llama-index-embeddings-voyageai/pyproject.toml
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-voyageai/pyproject.toml
@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-embeddings-voyageai"
 readme = "README.md"
-version = "0.1.3"
+version = "0.1.4"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"