diff --git a/llama_index/llms/gemini.py b/llama_index/llms/gemini.py
index ee3b78c0b8ca07b3c4ead01df6acd121b6497ed4..233ddb6367cc9a8f495958cf8ff99d6fb5eaed64 100644
--- a/llama_index/llms/gemini.py
+++ b/llama_index/llms/gemini.py
@@ -71,6 +71,7 @@ class Gemini(CustomLLM):
         safety_settings: "genai.types.SafetySettingOptions" = None,
         callback_manager: Optional[CallbackManager] = None,
         api_base: Optional[str] = None,
+        transport: Optional[str] = None,
         **generate_kwargs: Any,
     ):
         """Creates a new Gemini model interface."""
@@ -89,6 +90,9 @@ class Gemini(CustomLLM):
         }
         if api_base:
             config_params["client_options"] = {"api_endpoint": api_base}
+        if transport:
+            config_params["transport"] = transport
+            # transport: A string, one of: [`rest`, `grpc`, `grpc_asyncio`].
         genai.configure(**config_params)
 
         base_gen_config = generation_config if generation_config else {}
diff --git a/llama_index/multi_modal_llms/gemini.py b/llama_index/multi_modal_llms/gemini.py
index aa6920a3b602cfb77817282e1f6b72bb24f7a1c6..ff164e862e94f76f8e8e3310762c0ed98d6de41b 100644
--- a/llama_index/multi_modal_llms/gemini.py
+++ b/llama_index/multi_modal_llms/gemini.py
@@ -74,6 +74,7 @@ class GeminiMultiModal(MultiModalLLM):
         generation_config: Optional["genai.types.GenerationConfigDict"] = None,
         safety_settings: "genai.types.SafetySettingOptions" = None,
         api_base: Optional[str] = None,
+        transport: Optional[str] = None,
         callback_manager: Optional[CallbackManager] = None,
         **generate_kwargs: Any,
     ):
@@ -100,6 +101,9 @@ class GeminiMultiModal(MultiModalLLM):
         }
         if api_base:
             config_params["client_options"] = {"api_endpoint": api_base}
+        if transport:
+            config_params["transport"] = transport
+            # transport: A string, one of: [`rest`, `grpc`, `grpc_asyncio`].
         genai.configure(**config_params)
 
         base_gen_config = generation_config if generation_config else {}