From e92e5293ace3c9912eb893c16358f15bb766a6a6 Mon Sep 17 00:00:00 2001
From: BetterAndBetterII <141388234+BetterAndBetterII@users.noreply.github.com>
Date: Tue, 6 Feb 2024 00:24:13 +0800
Subject: [PATCH] Support Gemini "transport" configuration (#10457)

* Support Gemini "transport" configuration

Added support for configuring the Gemini transport method (`rest`, `grpc`, or `grpc_asyncio`).

* Sync updates in multi_modal_llms/gemini
---
 llama_index/llms/gemini.py             | 4 ++++
 llama_index/multi_modal_llms/gemini.py | 4 ++++
 2 files changed, 8 insertions(+)

diff --git a/llama_index/llms/gemini.py b/llama_index/llms/gemini.py
index ee3b78c0b8..233ddb6367 100644
--- a/llama_index/llms/gemini.py
+++ b/llama_index/llms/gemini.py
@@ -71,6 +71,7 @@ class Gemini(CustomLLM):
         safety_settings: "genai.types.SafetySettingOptions" = None,
         callback_manager: Optional[CallbackManager] = None,
         api_base: Optional[str] = None,
+        transport: Optional[str] = None,
         **generate_kwargs: Any,
     ):
         """Creates a new Gemini model interface."""
@@ -89,6 +90,9 @@ class Gemini(CustomLLM):
         }
         if api_base:
             config_params["client_options"] = {"api_endpoint": api_base}
+        if transport:
+            config_params["transport"] = transport
+        # transport: A string, one of: [`rest`, `grpc`, `grpc_asyncio`].
         genai.configure(**config_params)
 
         base_gen_config = generation_config if generation_config else {}
diff --git a/llama_index/multi_modal_llms/gemini.py b/llama_index/multi_modal_llms/gemini.py
index aa6920a3b6..ff164e862e 100644
--- a/llama_index/multi_modal_llms/gemini.py
+++ b/llama_index/multi_modal_llms/gemini.py
@@ -74,6 +74,7 @@ class GeminiMultiModal(MultiModalLLM):
         generation_config: Optional["genai.types.GenerationConfigDict"] = None,
         safety_settings: "genai.types.SafetySettingOptions" = None,
         api_base: Optional[str] = None,
+        transport: Optional[str] = None,
         callback_manager: Optional[CallbackManager] = None,
         **generate_kwargs: Any,
     ):
@@ -100,6 +101,9 @@ class GeminiMultiModal(MultiModalLLM):
         }
         if api_base:
             config_params["client_options"] = {"api_endpoint": api_base}
+        if transport:
+            config_params["transport"] = transport
+        # transport: A string, one of: [`rest`, `grpc`, `grpc_asyncio`].
         genai.configure(**config_params)
 
         base_gen_config = generation_config if generation_config else {}
-- 
GitLab