diff --git a/llama-index-integrations/llms/llama-index-llms-vertex/llama_index/llms/vertex/base.py b/llama-index-integrations/llms/llama-index-llms-vertex/llama_index/llms/vertex/base.py
index 228918f4cdf45c50e2af8f01b46f28ea32d3cbd4..02971bd1833e9f2739571eb809c344a73aa0e59f 100644
--- a/llama-index-integrations/llms/llama-index-llms-vertex/llama_index/llms/vertex/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-vertex/llama_index/llms/vertex/base.py
@@ -13,10 +13,7 @@ from llama_index.core.base.llms.types import (
 )
 from llama_index.core.bridge.pydantic import Field, PrivateAttr
 from llama_index.core.callbacks import CallbackManager
-from llama_index.core.llms.callbacks import (
-    llm_chat_callback,
-    llm_completion_callback,
-)
+from llama_index.core.llms.callbacks import llm_chat_callback, llm_completion_callback
 from llama_index.core.llms.llm import LLM
 from llama_index.core.types import BaseOutputParser, PydanticProgramMode
 from llama_index.llms.vertex.gemini_utils import create_gemini_client, is_gemini_model
@@ -133,6 +130,7 @@ class Vertex(LLM):
         return LLMMetadata(
             is_chat_model=self._is_chat_model,
             model_name=self.model,
+            system_role=MessageRole.USER,  # Vertex does not support system messages; override the default MessageRole.SYSTEM
         )
 
     @property
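
Note (illustrative, not part of the patch): the sketch below shows how downstream code could honor the new `system_role` metadata when injecting a system prompt, which is roughly what llama-index core does when it prepends a configured `system_prompt` to chat messages. The helper `prepend_system_prompt` is hypothetical and only demonstrates the effect of advertising `MessageRole.USER` here.

```python
from typing import List

from llama_index.core.base.llms.types import ChatMessage, LLMMetadata, MessageRole


def prepend_system_prompt(
    messages: List[ChatMessage], system_prompt: str, metadata: LLMMetadata
) -> List[ChatMessage]:
    """Prepend a system prompt using the role the LLM advertises."""
    # With this patch, Vertex reports system_role=MessageRole.USER, so the
    # system prompt is delivered as a user message instead of a system message.
    return [ChatMessage(role=metadata.system_role, content=system_prompt), *messages]


messages = prepend_system_prompt(
    [ChatMessage(role=MessageRole.USER, content="Hello")],
    "You are a helpful assistant.",
    LLMMetadata(
        is_chat_model=True,
        model_name="gemini-pro",  # hypothetical model name for illustration
        system_role=MessageRole.USER,
    ),
)
```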