From b224ff42d045dd2c3839c03a5c9e35f4c1303e79 Mon Sep 17 00:00:00 2001
From: Jerry Liu <jerryjliu98@gmail.com>
Date: Sat, 23 Mar 2024 00:23:49 -0700
Subject: [PATCH] mistral function calling notebook (#12190)

---
 docs/docs/examples/llm/mistralai.ipynb | 85 +++++++++++++++++++++++---
 1 file changed, 77 insertions(+), 8 deletions(-)

diff --git a/docs/docs/examples/llm/mistralai.ipynb b/docs/docs/examples/llm/mistralai.ipynb
index 00a3dc5abe..765743508f 100644
--- a/docs/docs/examples/llm/mistralai.ipynb
+++ b/docs/docs/examples/llm/mistralai.ipynb
@@ -1,7 +1,6 @@
 {
  "cells": [
   {
-   "attachments": {},
    "cell_type": "markdown",
    "id": "6453d3d5",
    "metadata": {},
@@ -10,7 +9,6 @@
    ]
   },
   {
-   "attachments": {},
    "cell_type": "markdown",
    "id": "72ed6f61-28a7-4f90-8a45-e3f452f95dbd",
    "metadata": {},
@@ -82,7 +80,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Paul Graham is a well-known entrepreneur, hacker, and essayist. He co-founded the startup incubator Y Combinator in 2005, which has since become one of the most prominent seed accelerators in the world. Graham's essays on entrepreneurship, startups, and technology have been widely read and influential. He has also been an active programmer and has made significant contributions to various open-source projects. Graham's writing has been collected in several books, including \"Hackers & Painters,\" \"Maker's Schedule, Manager's Schedule,\" and \"The Accidental Entrepreneur.\" He is known for his insightful and thought-provoking perspectives on technology, business, and culture.\n"
+      "Paul Graham is a well-known entrepreneur, hacker, and essayist. He co-founded the startup incubator Y Combinator in 2005, which has since become one of the most successful and influential startup accelerators in the world. Graham is also known for his essays on entrepreneurship, programming, and startups, which have been published on his website, Hacker News, and in various publications. He has been described as a \"pioneer of the startup scene in Silicon Valley\" and a \"leading figure in the Y Combinator startup community.\" Graham's essays have influenced generations of entrepreneurs and programmers, and he is considered a thought leader in the tech industry.\n"
      ]
     }
    ],
@@ -146,7 +144,6 @@
    ]
   },
   {
-   "attachments": {},
    "cell_type": "markdown",
    "id": "71c3f32d",
    "metadata": {},
@@ -206,7 +203,6 @@
    ]
   },
   {
-   "attachments": {},
    "cell_type": "markdown",
    "id": "e6adbf45",
    "metadata": {},
@@ -253,7 +249,6 @@
    ]
   },
   {
-   "attachments": {},
    "cell_type": "markdown",
    "id": "67293b92",
    "metadata": {},
@@ -458,6 +453,80 @@
     "    print(r.delta, end=\"\")"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "id": "ca6fefbb-419a-4f08-922c-ecd2362ff338",
+   "metadata": {},
+   "source": [
+    "## Function Calling\n",
+    "\n",
+    "`mistral-large` supports native function calling. It integrates seamlessly with LlamaIndex tools through the `predict_and_call` function on the `llm`.\n",
+    "\n",
+    "This allows you to attach any tools and let the LLM decide which tools to call (if any).\n",
+    "\n",
+    "If you wish to perform tool calling as part of an agentic loop, check out our [agent guides](https://docs.llamaindex.ai/en/latest/module_guides/deploying/agents/) instead.\n",
+    "\n",
+    "**NOTE**: If you use another Mistral model, we will fall back to a ReAct prompt to attempt the function call. Your mileage may vary."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ab80d63a-7bd4-446c-9a22-0d1604dbc59e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from llama_index.llms.mistralai import MistralAI\n",
+    "from llama_index.core.tools import FunctionTool\n",
+    "\n",
+    "\n",
+    "def multiply(a: int, b: int) -> int:\n",
+    "    \"\"\"Multiply two integers and return the resulting integer.\"\"\"\n",
+    "    return a * b\n",
+    "\n",
+    "\n",
+    "def mystery(a: int, b: int) -> int:\n",
+    "    \"\"\"Mystery function on two integers.\"\"\"\n",
+    "    return a * b + a + b\n",
+    "\n",
+    "\n",
+    "mystery_tool = FunctionTool.from_defaults(fn=mystery)\n",
+    "multiply_tool = FunctionTool.from_defaults(fn=multiply)\n",
+    "\n",
+    "llm = MistralAI(model=\"mistral-large-latest\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bba8a5fa-bed9-44d3-bb3c-149dbc694bf6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "response = llm.predict_and_call(\n",
+    "    [mystery_tool, multiply_tool],\n",
+    "    user_msg=\"What happens if I run the mystery function on 5 and 7\",\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "28b3a9ed-1d0e-4bdc-abb2-acf4afab7a5f",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "47\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(str(response))"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "5152a2b4-78e6-47a5-933d-f5186ec0f775",
    "metadata": {},
    "source": [
@@ -500,9 +569,9 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "llama_index_v3",
    "language": "python",
-   "name": "python3"
+   "name": "llama_index_v3"
   },
   "language_info": {
    "codemirror_mode": {
--
GitLab
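
For reference, the function-calling cells added by this patch can be collapsed into a single standalone script. The sketch below is not part of the patch itself; it assumes the `llama-index-llms-mistralai` integration package is installed and that a Mistral API key is available to the client (for example via the MISTRAL_API_KEY environment variable, or passed explicitly with api_key=..., neither of which is shown in this diff):

# Minimal sketch of the function-calling flow from the notebook cells above.
# Assumes MISTRAL_API_KEY is set in the environment (an assumption; not shown in this diff).
from llama_index.core.tools import FunctionTool
from llama_index.llms.mistralai import MistralAI


def multiply(a: int, b: int) -> int:
    """Multiply two integers and return the resulting integer."""
    return a * b


def mystery(a: int, b: int) -> int:
    """Mystery function on two integers."""
    return a * b + a + b


mystery_tool = FunctionTool.from_defaults(fn=mystery)
multiply_tool = FunctionTool.from_defaults(fn=multiply)

# mistral-large supports native function calling; predict_and_call attaches the
# tools, lets the model choose one (or none), executes it, and returns its output.
llm = MistralAI(model="mistral-large-latest")

response = llm.predict_and_call(
    [mystery_tool, multiply_tool],
    user_msg="What happens if I run the mystery function on 5 and 7",
)
print(str(response))  # expected: 47, i.e. mystery(5, 7) = 5 * 7 + 5 + 7

The printed value should match the "47" captured in the notebook output above, since the model is expected to route the query to the mystery tool.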