From db4114ab3766ade087361484884fa547eff6858c Mon Sep 17 00:00:00 2001 From: singhmanas1 <122591937+singhmanas1@users.noreply.github.com> Date: Thu, 7 Mar 2024 19:35:59 -0800 Subject: [PATCH] Feat/mymagic-return-output (#11761) * Added the return output json * Added the return output json * cr --------- Co-authored-by: Manas Singh <manassingh@Manass-MacBook-Air.local> Co-authored-by: Haotian Zhang <socool.king@gmail.com> --- docs/examples/llm/mymagic.ipynb | 3 ++- .../llama_index/llms/mymagic/base.py | 5 +++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/examples/llm/mymagic.ipynb b/docs/examples/llm/mymagic.ipynb index 823dffe442..af38d4a835 100644 --- a/docs/examples/llm/mymagic.ipynb +++ b/docs/examples/llm/mymagic.ipynb @@ -67,6 +67,7 @@ " role_arn=\"your-role-arn\",\n", " system_prompt=\"your-system-prompt\",\n", " region=\"your-bucket-region\",\n", + " return_output=False, # Whether you want MyMagic API to return the output json\n", ")" ] }, @@ -78,7 +79,7 @@ "source": [ "resp = llm.complete(\n", " question=\"your-question\",\n", - " model=\"chhoose-model\", # currently we support mistral7b, llama7b, mixtral8x7b,codellama70b, llama70b, more to come...\n", + " model=\"choose-model\", # currently we support mistral7b, llama7b, mixtral8x7b, codellama70b, llama70b, more to come...\n", " max_tokens=5, # number of tokens to generate, default is 10\n", ")" ] diff --git a/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py b/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py index 5d7e49d15b..7051e10759 100644 --- a/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py +++ b/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py @@ -48,6 +48,9 @@ class MyMagicAI(LLM): region: Optional[str] = Field( "eu-west-2", description="The region the bucket is in. Only used for AWS S3." 
) + return_output: Optional[bool] = Field( + False, description="Whether MyMagic API should return the output json" + ) def __init__( self, @@ -58,6 +61,7 @@ class MyMagicAI(LLM): system_prompt: Optional[str], role_arn: Optional[str] = None, region: Optional[str] = None, + return_output: Optional[bool] = False, **kwargs: Any, ) -> None: super().__init__(**kwargs) @@ -71,6 +75,7 @@ class MyMagicAI(LLM): "role_arn": role_arn, "system_prompt": system_prompt, "region": region, + "return_output": return_output, } @classmethod -- GitLab