diff --git a/docs/examples/llm/mymagic.ipynb b/docs/examples/llm/mymagic.ipynb
index 823dffe442058f01d1ad0dcb25a592f84c6b0232..af38d4a83547fa699d3835ffca2dd781eb645edf 100644
--- a/docs/examples/llm/mymagic.ipynb
+++ b/docs/examples/llm/mymagic.ipynb
@@ -67,6 +67,7 @@
     "    role_arn=\"your-role-arn\",\n",
     "    system_prompt=\"your-system-prompt\",\n",
     "    region=\"your-bucket-region\",\n",
+    "    return_output=False,  # Whether you want MyMagic API to return the output json\n",
     ")"
    ]
   },
@@ -78,7 +79,7 @@
    "source": [
     "resp = llm.complete(\n",
     "    question=\"your-question\",\n",
-    "    model=\"chhoose-model\",  # currently we support mistral7b, llama7b, mixtral8x7b,codellama70b, llama70b, more to come...\n",
+    "    model=\"chhoose-model\",  # currently we support mistral7b, llama7b, mixtral8x7b, codellama70b, llama70b, more to come...\n",
     "    max_tokens=5,  # number of tokens to generate, default is 10\n",
     ")"
    ]
diff --git a/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py b/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py
index 5d7e49d15be0b31c9f313cdaea26b55074dc7058..7051e10759e2fd0c93060090aaf4d07443705e3c 100644
--- a/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-mymagic/llama_index/llms/mymagic/base.py
@@ -48,6 +48,9 @@ class MyMagicAI(LLM):
     region: Optional[str] = Field(
         "eu-west-2", description="The region the bucket is in. Only used for AWS S3."
     )
+    return_output: Optional[bool] = Field(
+        False, description="Whether MyMagic API should return the output json"
+    )
 
     def __init__(
         self,
@@ -58,6 +61,7 @@ class MyMagicAI(LLM):
         system_prompt: Optional[str],
         role_arn: Optional[str] = None,
         region: Optional[str] = None,
+        return_output: Optional[bool] = False,
         **kwargs: Any,
     ) -> None:
         super().__init__(**kwargs)
@@ -71,6 +75,7 @@ class MyMagicAI(LLM):
             "role_arn": role_arn,
             "system_prompt": system_prompt,
             "region": region,
+            "return_output": return_output,
         }
 
     @classmethod
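
For context, a minimal usage sketch of the new return_output flag, assembled from the notebook cells touched above. Constructor arguments other than role_arn, system_prompt, region, and return_output (api_key, storage_provider, bucket_name, session) are assumed from the rest of the example notebook and are placeholders, not values introduced by this diff:

from llama_index.llms.mymagic import MyMagicAI

# Placeholder configuration; swap in real values for your MyMagic account.
llm = MyMagicAI(
    api_key="your-api-key",              # assumed arg, not shown in this hunk
    storage_provider="s3",               # assumed arg, not shown in this hunk
    bucket_name="your-bucket-name",      # assumed arg, not shown in this hunk
    session="your-session-name",         # assumed arg, not shown in this hunk
    role_arn="your-role-arn",
    system_prompt="your-system-prompt",
    region="your-bucket-region",
    return_output=False,  # new flag: whether MyMagic API should return the output json
)

resp = llm.complete(
    question="your-question",
    model="mistral7b",  # or llama7b, mixtral8x7b, codellama70b, llama70b, ...
    max_tokens=5,  # number of tokens to generate, default is 10
)
print(resp)

With return_output left at its default of False, the response omits the generated output json; set it to True only if you want MyMagic API to ship that payload back in the response.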