diff --git a/docs/examples/llm/mymagic.ipynb b/docs/examples/llm/mymagic.ipynb
index d1ee8a10a3251fb4613584e1c43218055ba0565d..823dffe442058f01d1ad0dcb25a592f84c6b0232 100644
--- a/docs/examples/llm/mymagic.ipynb
+++ b/docs/examples/llm/mymagic.ipynb
@@ -66,6 +66,7 @@
     "    session=\"your-session-name\",  # files should be located in this folder on which batch inference will be run\n",
     "    role_arn=\"your-role-arn\",\n",
     "    system_prompt=\"your-system-prompt\",\n",
+    "    region=\"your-bucket-region\",\n",
     ")"
    ]
   },
@@ -123,6 +124,7 @@
     "        session=\"your-session-name\",  # files should be located in this folder on which batch inference will be run\n",
     "        role_arn=\"your-role-arn\",\n",
     "        system_prompt=\"your-system-prompt\",\n",
+    "        region=\"your-bucket-region\",\n",
     "    )\n",
     "    response = await allm.acomplete(\n",
     "        question=\"your-question\",\n",
diff --git a/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py b/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py
index 9c3f6f3ea3ae3d5d74565aa3a90b3c0f8365718f..5d7e49d15be0b31c9f313cdaea26b55074dc7058 100644
--- a/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py
@@ -36,7 +36,7 @@ class MyMagicAI(LLM):
         description="The session to use. This is a subfolder in the bucket where your data is located.",
     )
     role_arn: Optional[str] = Field(
-        None, description="ARN for role assumption in AWS S3"
+        None, description="ARN for role assumption in AWS S3."
     )
     system_prompt: str = Field(
         default="Answer the question based only on the given content. Do not give explanations or examples. Do not continue generating more text after the answer.",
@@ -45,6 +45,9 @@ class MyMagicAI(LLM):
     question_data: Dict[str, Any] = Field(
         default_factory=dict, description="The data to send to the MyMagicAI API."
     )
+    region: Optional[str] = Field(
+        "eu-west-2", description="The region the bucket is in. Only used for AWS S3."
+    )
 
     def __init__(
         self,
@@ -54,6 +57,7 @@ class MyMagicAI(LLM):
         session: str,
         system_prompt: Optional[str],
         role_arn: Optional[str] = None,
+        region: Optional[str] = None,
        **kwargs: Any,
     ) -> None:
         super().__init__(**kwargs)
@@ -66,6 +70,7 @@ class MyMagicAI(LLM):
             "max_tokens": self.max_tokens,
             "role_arn": role_arn,
             "system_prompt": system_prompt,
+            "region": region,
         }
 
     @classmethod
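
For context, a minimal usage sketch of the new `region` argument, pieced together from the notebook cells touched above. Only `session`, `role_arn`, `system_prompt`, `region`, and `acomplete(question=...)` appear in this diff; the other constructor arguments and all placeholder values are assumptions, and the additional keyword arguments the notebook passes to `acomplete` are omitted here.

```python
import asyncio

from llama_index.llms.mymagic import MyMagicAI


async def main() -> None:
    allm = MyMagicAI(
        api_key="your-api-key",  # assumed argument, not shown in this diff
        storage_provider="s3",  # assumed argument, not shown in this diff
        bucket_name="your-bucket-name",  # assumed argument, not shown in this diff
        session="your-session-name",  # bucket subfolder holding the files for batch inference
        role_arn="your-role-arn",
        system_prompt="your-system-prompt",
        region="your-bucket-region",  # new: AWS S3 bucket region, defaults to "eu-west-2"
    )
    # The notebook passes further keyword arguments here; they are elided in the diff.
    response = await allm.acomplete(question="your-question")
    print(response)


asyncio.run(main())
```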