diff --git a/docs/api_reference/index.rst b/docs/api_reference/index.rst
index 01750689452d0bb936662c371dbe81664a2ce309..6ad05d3756120563e3c82486e6c32ba2e7067410 100644
--- a/docs/api_reference/index.rst
+++ b/docs/api_reference/index.rst
@@ -14,6 +14,7 @@ API Reference for the ``llama-index`` package.
    evaluation.rst
    example_notebooks.rst
    indices.rst
+   instrumentation.rst
    llms.rst
    embeddings.rst
    memory.rst
diff --git a/docs/api_reference/instrumentation.rst b/docs/api_reference/instrumentation.rst
new file mode 100644
index 0000000000000000000000000000000000000000..853d4cf360f025cd70beae673835ff9c1b0abb7b
--- /dev/null
+++ b/docs/api_reference/instrumentation.rst
@@ -0,0 +1,77 @@
+Instrumentation
+===============
+
+LlamaIndex contains a simple instrumentation framework that allows you to
+observe events and spans happening in the framework.
+
+Event Handlers
+--------------
+
+.. autopydantic_model:: llama_index.core.instrumentation.event_handlers.base.BaseEventHandler
+
+Event Types
+-----------
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.base.BaseEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.agent.AgentChatWithStepEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.agent.AgentChatWithStepStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.agent.AgentRunStepEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.agent.AgentRunStepStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.agent.AgentToolCallEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.chat_engine.StreamChatDeltaReceivedEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.chat_engine.StreamChatEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.chat_engine.StreamChatErrorEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.chat_engine.StreamChatStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.embedding.EmbeddingEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.embedding.EmbeddingStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.llm.LLMChatEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.llm.LLMChatStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.llm.LLMCompletionEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.llm.LLMCompletionStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.llm.LLMPredictEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.llm.LLMPredictStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.query.QueryEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.query.QueryStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.retrieval.RetrievalEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.retrieval.RetrievalStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.synthesis.GetResponseEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.synthesis.GetResponseStartEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.synthesis.SynthesizeEndEvent
+
+.. autopydantic_model:: llama_index.core.instrumentation.events.synthesis.SynthesizeStartEvent
+
+Span Handlers
+-------------
+
+.. autopydantic_model:: llama_index.core.instrumentation.span_handlers.base.BaseSpanHandler
+
+.. autopydantic_model:: llama_index.core.instrumentation.span_handlers.simple.SimpleSpanHandler
+
+Span Types
+-----------
+
+.. autopydantic_model:: llama_index.core.instrumentation.span.base.BaseSpan
diff --git a/docs/examples/instrumentation/basic_usage.ipynb b/docs/examples/instrumentation/basic_usage.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..f30a13c31fa588d0cab21e567120db15e80a71b6
--- /dev/null
+++ b/docs/examples/instrumentation/basic_usage.ipynb
@@ -0,0 +1,659 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "761a0ad3-7506-4073-bc2f-a46b91452e53",
+   "metadata": {},
+   "source": [
+    "# Instrumentation: Basic Usage"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "21348683-1b4e-42f9-abf8-0548fe02cfde",
+   "metadata": {},
+   "source": [
+    "The `instrumentation` module can be used for observability and monitoring of your llama-index application. It is comprised of the following core abstractions:\n",
+    "\n",
+    "- `Event` — represents a single moment in time that a certain occurrence took place within the execution of the application’s code.\n",
+    "- `EventHandler` — listen to the occurrences of `Event`'s and execute code logic at these moments in time.\n",
+    "- `Span` — represents the execution flow of a particular part in the application’s code and thus contains `Event`'s.\n",
+    "- `SpanHandler` — is responsible for the entering, exiting, and dropping (i.e., early exiting due to error) of `Span`'s.\n",
+    "- `Dispatcher` — emits `Event`'s as well as signals to enter/exit/drop a `Span` to the appropriate handlers."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "0d24c1a2-6dca-4636-b5f6-1547dd045ff3",
+   "metadata": {},
+   "source": [
+    "In this notebook, we demonstrate the basic usage pattern of `instrumentation`:\n",
+    "\n",
+    "1. Define your custom `EventHandler`\n",
+    "2. Define your custom `SpanHandler` which handles an associated `Span` type\n",
+    "3. Attach your `EventHandler` and `SpanHandler` to the dispatcher of choice (here, we'll attach it to the root dispatcher)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9692504e-51a2-4a58-86ae-5de5f5994966",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import nest_asyncio\n",
+    "\n",
+    "nest_asyncio.apply()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "3cb70687-c568-4991-b962-778b5edb3dbf",
+   "metadata": {},
+   "source": [
+    "### Custom Event Handlers"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8c4ec173-3dc9-49c6-b613-b4a9bf88871f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from llama_index.core.instrumentation.event_handlers import BaseEventHandler"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "7fc325f0-d95c-4d5e-aae6-33d7812ba587",
+   "metadata": {},
+   "source": [
+    "Defining your custom `EventHandler` involves subclassing the `BaseEventHandler`. Doing so, requires defining logic for the abstract method `handle()`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d5e113aa-f507-4c4e-89a9-bfdc6dbf31ec",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "class MyEventHandler(BaseEventHandler):\n",
+    "    @classmethod\n",
+    "    def class_name(cls) -> str:\n",
+    "        \"\"\"Class name.\"\"\"\n",
+    "        return \"MyEventHandler\"\n",
+    "\n",
+    "    def handle(self, event) -> None:\n",
+    "        \"\"\"Logic for handling event.\"\"\"\n",
+    "        # THIS IS WHERE YOU ADD YOUR LOGIC TO HANDLE EVENTS\n",
+    "        print(event.dict())\n",
+    "        print(\"\")\n",
+    "        with open(\"log.txt\", \"a\") as f:\n",
+    "            f.write(str(event))\n",
+    "            f.write(\"\\n\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "975fb994-436f-4f5d-8a53-74a92ad82a2b",
+   "metadata": {},
+   "source": [
+    "### Custom Span Handlers"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "07799718-c466-4d44-8dfd-b03a24c09eef",
+   "metadata": {},
+   "source": [
+    "Defining a custom `SpanHandler` also involves subclassing a base class, in this case `BaseSpanHandler`. However, since `SpanHandler`'s work with an associated `Span` type, you will need to create this as well if you want to handle a new `Span` type."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d4fc8888-d74c-4bf3-b353-97263104b17e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from llama_index.core.instrumentation.span import BaseSpan"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a22d4b0d-2c64-4aa3-94ea-1e15533be44e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from typing import Any, Optional\n",
+    "from llama_index.core.bridge.pydantic import Field\n",
+    "from llama_index.core.instrumentation.span.base import BaseSpan\n",
+    "from llama_index.core.instrumentation.span_handlers import BaseSpanHandler\n",
+    "\n",
+    "\n",
+    "class MyCustomSpan(BaseSpan):\n",
+    "    custom_field_1: Any = Field(...)\n",
+    "    custom_field_2: Any = Field(...)\n",
+    "\n",
+    "\n",
+    "class MyCustomSpanHandler(BaseSpanHandler[MyCustomSpan]):\n",
+    "    @classmethod\n",
+    "    def class_name(cls) -> str:\n",
+    "        \"\"\"Class name.\"\"\"\n",
+    "        return \"MyCustomSpanHandler\"\n",
+    "\n",
+    "    def new_span(\n",
+    "        self, id: str, parent_span_id: Optional[str], **kwargs\n",
+    "    ) -> Optional[MyCustomSpan]:\n",
+    "        \"\"\"Create a span.\"\"\"\n",
+    "        # logic for creating a new MyCustomSpan\n",
+    "        pass\n",
+    "\n",
+    "    def prepare_to_exit_span(\n",
+    "        self, id: str, result: Optional[Any] = None, **kwargs\n",
+    "    ) -> Any:\n",
+    "        \"\"\"Logic for preparing to exit a span.\"\"\"\n",
+    "        pass\n",
+    "\n",
+    "    def prepare_to_drop_span(\n",
+    "        self, id: str, err: Optional[Exception], **kwargs\n",
+    "    ) -> Any:\n",
+    "        \"\"\"Logic for preparing to drop a span.\"\"\"\n",
+    "        pass"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "c2e07177-52c1-4d08-8185-4c8e1f6a89fe",
+   "metadata": {},
+   "source": [
+    "For this notebook, we'll use `SimpleSpanHandler` that works with the `SimpleSpan` type."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "18746b28-86e5-4eb8-bebc-8d28435657c9",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from llama_index.core.instrumentation.span_handlers import SimpleSpanHandler"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9ab6d442-6d40-4a19-ac9b-1253ba00028e",
+   "metadata": {},
+   "source": [
+    "### Dispatcher\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a3710027-218f-4b50-bef6-80497845a376",
+   "metadata": {},
+   "source": [
+    "Now that we have our `EventHandler` and our `SpanHandler`, we can attach it to a `Dispatcher` that will emit `Event`'s and signals to start/exit/drop a `Span` to the appropriate handlers. Those that are familiar with `Logger` from the `logging` Python module, might notice that `Dispatcher` adopts a similar interface. What's more is that `Dispatcher` also utilizes a similar hierarchy and propagation scheme as `Logger`. Specifically, a `dispatcher` will emit `Event`'s to its handlers and by default propagate these events to its parent `Dispatcher` for it to send to its own handlers."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d3138230-2014-4030-9076-59f9d06c0b8d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import llama_index.core.instrumentation as instrument\n",
+    "\n",
+    "dispatcher = instrument.get_dispatcher()  # modify root dispatcher"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8d65a394-e390-486b-b261-e2f318c4b9e1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "span_handler = SimpleSpanHandler()\n",
+    "\n",
+    "dispatcher.add_event_handler(MyEventHandler())\n",
+    "dispatcher.add_span_handler(span_handler)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "4235053e-4b1e-4280-9284-1a5d9b44cb76",
+   "metadata": {},
+   "source": [
+    "You can also get dispatchers by name. Purely for the sake of demonstration, in the cells below we get the dispatcher that is defined in the `base.base_query_engine` submodule of `llama_index.core`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "43798315-04ee-4671-ba43-c02c8d762dbc",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "qe_dispatcher = instrument.get_dispatcher(\n",
+    "    \"llama_index.core.base.base_query_engine\"\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8c191594-ed8c-4a1e-bdf7-0681bfb76115",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "Dispatcher(name='llama_index.core.base.base_query_engine', event_handlers=[], span_handlers=[NullSpanHandler(open_spans={}, current_span_id=None)], parent_name='root', manager=<llama_index.core.instrumentation.dispatcher.Manager object at 0x1481ab4c0>, root_name='root', propagate=True)"
+      ]
+     },
+     "execution_count": null,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "qe_dispatcher"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "35d0ba04-2328-41ed-9822-bef7c362fd87",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "Dispatcher(name='root', event_handlers=[NullEventHandler(), MyEventHandler()], span_handlers=[NullSpanHandler(open_spans={}, current_span_id=None), SimpleSpanHandler(open_spans={}, current_span_id=None, completed_spans=[])], parent_name='', manager=None, root_name='root', propagate=False)"
+      ]
+     },
+     "execution_count": null,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "qe_dispatcher.parent"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "2ebcd2fa-84d7-459d-9d33-4d02f52c48df",
+   "metadata": {},
+   "source": [
+    "### Test It Out"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a37cb9a7-d5fe-43a1-8da7-4aeae5f74846",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "data/paul_graham/paul_graham_essay.txt: No such file or directory\n"
+     ]
+    }
+   ],
+   "source": [
+    "!mkdir -p 'data/paul_graham/'\n",
+    "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f02f8dc5-18da-4e70-abed-614e0b98ce8c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from llama_index.core import SimpleDirectoryReader, VectorStoreIndex\n",
+    "\n",
+    "documents = SimpleDirectoryReader(input_dir=\"./data\").load_data()\n",
+    "index = VectorStoreIndex.from_documents(documents)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a3de7479-e48e-42fd-8515-f04e1c314469",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "query_engine = index.as_query_engine()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "e44df059-edc6-4342-ac06-8a9f293b8d5d",
+   "metadata": {},
+   "source": [
+    "#### Sync"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5719b4d5-d50b-49bc-80ca-a1233817c64d",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 50, 614289), 'id_': UUID('35643a53-52da-4547-b770-dba7600c9070'), 'class_name': 'QueryStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 50, 615966), 'id_': UUID('18fa9b70-6fbc-4ce7-9e45-1f292766e820'), 'class_name': 'RetrievalStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 50, 810658), 'id_': UUID('2aed6810-99a1-49ab-a6c4-5f242f1b1de3'), 'class_name': 'RetrievalEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 50, 811651), 'id_': UUID('6f8d96a2-4da0-485f-9a73-4dae2d026b48'), 'class_name': 'SynthesizeStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 50, 818960), 'id_': UUID('005e52bd-25f6-49cb-8766-5399098b7e51'), 'class_name': 'GetResponseStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 50, 823964), 'id_': UUID('52273f18-2865-42d3-a11e-acbe5bb56748'), 'class_name': 'LLMPredictStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 52, 375382), 'id_': UUID('54516719-425b-422a-b38c-8bd002d8fa74'), 'class_name': 'LLMPredictEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 52, 376003), 'id_': UUID('24ec9eb3-d2ad-46f7-8db2-faf68075c7b3'), 'class_name': 'GetResponseEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 52, 376347), 'id_': UUID('6d453292-374c-4fef-8bc4-ae164455a133'), 'class_name': 'SynthesizeEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 57, 52, 376505), 'id_': UUID('9919903a-dbb1-4bdf-8029-3e4fcd9ea073'), 'class_name': 'QueryEndEvent'}\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "query_result = query_engine.query(\"Who is Paul?\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "3810b999-d43e-4ca7-970d-4ccc41f84027",
+   "metadata": {},
+   "source": [
+    "#### Async"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "5f59bdfe-c661-4547-ae42-f8143a4b1c2c",
+   "metadata": {},
+   "source": [
+    "`Dispatcher` also works on async methods."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "23e70e94-e116-4e00-b75d-d13a3b1c23d8",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 35, 276918), 'id_': UUID('8ea98ded-91f2-45cf-b418-b92b3c7693bf'), 'class_name': 'QueryStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 35, 279006), 'id_': UUID('60ff04ce-0972-4fe1-bfaf-5ffaaea3ef4a'), 'class_name': 'RetrievalStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 35, 555879), 'id_': UUID('c29bb55e-b2e1-4637-a6cb-745a2424da90'), 'class_name': 'RetrievalEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 35, 557244), 'id_': UUID('dee202a1-f760-495e-a6f8-3644a535ff13'), 'class_name': 'SynthesizeStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 35, 564098), 'id_': UUID('c524828c-cdd7-4ddd-876a-5fda5f10143d'), 'class_name': 'GetResponseStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 35, 568930), 'id_': UUID('bd8e78ce-9a87-41a8-b009-e0694e09e0b3'), 'class_name': 'LLMPredictStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 37, 70264), 'id_': UUID('54633e8f-8f89-4a4c-9e79-f059f9dbecc2'), 'class_name': 'LLMPredictEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 37, 71236), 'id_': UUID('0a110c5c-3d4c-4eeb-8066-b8e512e69838'), 'class_name': 'GetResponseEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 37, 71652), 'id_': UUID('1152f15e-ac5b-4292-ad9f-45f8404183f9'), 'class_name': 'SynthesizeEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 58, 37, 71891), 'id_': UUID('8aa8f930-1ac2-4924-a185-00a06bc7ba79'), 'class_name': 'QueryEndEvent'}\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "query_result = await query_engine.aquery(\"Who is Paul?\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f2fcb260-cec3-446a-9971-49bad189e83d",
+   "metadata": {},
+   "source": [
+    "#### Streaming"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "0b11b898-9a55-4ac4-b7ed-aa92ca1b6865",
+   "metadata": {},
+   "source": [
+    "`Dispatcher` also works on methods that support streaming!"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fef4f47b-291a-4878-81e0-bef7cb5299e4",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "chat_engine = index.as_chat_engine()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "491e0962-d49d-428f-b861-b9883e1708fc",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 31, 345865), 'id_': UUID('1d9643ca-368b-4e08-9878-ed7682196007'), 'class_name': 'AgentChatWithStepStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 31, 346727), 'id_': UUID('c38a18e3-0c2c-43b3-a1a9-0fb2e696e627'), 'class_name': 'AgentRunStepStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 31, 348524), 'id_': UUID('9a49dd15-715c-474a-b704-b9e8df919c50'), 'class_name': 'StreamChatStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 31, 975148), 'id_': UUID('c3b5b9eb-104d-461e-a081-e65ec9dc3131'), 'class_name': 'StreamChatEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 31, 977522), 'id_': UUID('2e6c3935-8ece-49bf-aacc-de18fd79da42'), 'class_name': 'QueryStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 31, 978389), 'id_': UUID('5c43f441-d262-427f-8ef7-b3b6e6e86f44'), 'class_name': 'RetrievalStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 32, 188462), 'id_': UUID('724dd93f-39a8-4b12-b056-194c4cf3ed72'), 'class_name': 'RetrievalEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 32, 189601), 'id_': UUID('27e5ac36-d313-4df6-bc8b-40b79e59698e'), 'class_name': 'SynthesizeStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 32, 208520), 'id_': UUID('77ec49c0-fb24-46dd-b843-954e241a0eb0'), 'class_name': 'GetResponseStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 32, 214106), 'id_': UUID('b28106fa-8d8e-49ca-92af-a37e0db45b9f'), 'class_name': 'LLMPredictStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 59544), 'id_': UUID('0e40cac7-9eb4-48d3-81bc-d01a5b3c0440'), 'class_name': 'LLMPredictEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 60941), 'id_': UUID('77e1d7eb-5807-4184-9e18-de4fdde18654'), 'class_name': 'GetResponseEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 61349), 'id_': UUID('565c9019-89b0-4dec-bb54-0d9642030009'), 'class_name': 'SynthesizeEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 61677), 'id_': UUID('3cba488e-1d69-4b75-a601-2cf467816ef0'), 'class_name': 'QueryEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 62157), 'id_': UUID('a550a4c7-40b3-43ac-abb5-5cab4b759888'), 'class_name': 'AgentRunStepEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 62417), 'id_': UUID('d99b6fd9-bf35-4c37-9625-da39cc5aef23'), 'class_name': 'AgentRunStepStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 63294), 'id_': UUID('b8015c1d-53b7-4530-812c-2ded8dab4c67'), 'class_name': 'StreamChatStartEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 387260), 'id_': UUID('d5bbdb58-be35-444d-b0f0-0ccbc570e54e'), 'delta': 'Why', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 389911), 'id_': UUID('91e78251-def6-4bfb-9712-20c4c0d2690a'), 'class_name': 'AgentRunStepEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 389700), 'id_': UUID('8b181253-1ea6-4ffb-b384-c425307d7b88'), 'delta': ' did', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 390495), 'id_': UUID('57dd72cf-8b8c-4596-8de7-4a701fc50125'), 'class_name': 'AgentChatWithStepEndEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 409497), 'id_': UUID('e6ceba61-5c78-4ee3-972e-c9e02dbddc7c'), 'delta': ' the', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 410653), 'id_': UUID('e9d8d9fe-1080-455c-8add-06b1774506bc'), 'delta': ' computer', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 449414), 'id_': UUID('04cffd87-ca8a-4efb-af2a-a99964610e4c'), 'delta': ' keep', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 450316), 'id_': UUID('21824a0a-3674-42d1-91c7-d8a865c2b270'), 'delta': ' its', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 495431), 'id_': UUID('a677507c-6b74-454f-a185-3df008e9e5ff'), 'delta': ' drinks', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 496188), 'id_': UUID('8b94885c-ce78-46cc-8ee6-938484ae2980'), 'delta': ' on', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 527857), 'id_': UUID('dc356d5d-c968-4d43-9903-bc158b73338a'), 'delta': ' the', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 529075), 'id_': UUID('f8a2637f-e746-418d-8921-4e7e8c26956f'), 'delta': ' motherboard', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 554042), 'id_': UUID('dd21cd77-c329-4947-ad77-7683885e0ce1'), 'delta': '?', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 557320), 'id_': UUID('93a2a7e1-7fe5-4eb8-851e-1b3dea4df6b5'), 'delta': ' Because', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 608305), 'id_': UUID('445cb52e-9cf3-4f85-aba3-6383974e6b5f'), 'delta': ' it', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 609392), 'id_': UUID('bd7bb3dc-5bd7-418e-a18a-203cdb701bcb'), 'delta': ' had', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 609896), 'id_': UUID('e981bdb3-57ca-456e-8353-be087ea6bb55'), 'delta': ' too', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 610659), 'id_': UUID('79b3eb21-6f8d-4a87-9f26-04e757a29da3'), 'delta': ' many', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 667840), 'id_': UUID('73073cd6-dfae-4632-a302-a4841a08f272'), 'delta': ' bytes', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 669579), 'id_': UUID('c30da40c-bf7d-49f0-9505-2345fc67fde7'), 'delta': '!', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 670733), 'id_': UUID('f9baf8fc-8755-4740-8fe4-f5a3c9d77f9e'), 'delta': ' 😄', 'class_name': 'StreamChatDeltaReceivedEvent'}\n",
+      "\n",
+      "{'timestamp': datetime.datetime(2024, 3, 14, 15, 59, 33, 672180), 'id_': UUID('e07eb812-68b5-4027-8ee0-87bf6cd5c744'), 'class_name': 'StreamChatEndEvent'}\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "streaming_response = chat_engine.stream_chat(\"Tell me a joke.\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "27e4c33f-ee90-4716-8dc9-2ed22edd5197",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Why did the computer keep its drinks on the motherboard? Because it had too many bytes! 😄"
+     ]
+    }
+   ],
+   "source": [
+    "for token in streaming_response.response_gen:\n",
+    "    print(token, end=\"\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "d2266149-be2c-462b-a029-652363cb925f",
+   "metadata": {},
+   "source": [
+    "### Printing Basic Trace Trees with `SimpleSpanHandler`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "876ee4dd-3e28-4aae-a917-daac42f17b6f",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "BaseQueryEngine.query-bda10f51-e5c8-4ef8-9467-c816b0c92797 (1.762367)\n",
+      "└── RetrieverQueryEngine._query-35da82df-8e64-43c5-9046-11df14b3b9f7 (1.760649)\n",
+      "    ├── BaseRetriever.retrieve-237d1b8d-f8b1-4e0b-908c-00087f086c2c (0.19558)\n",
+      "    │   └── VectorIndexRetriever._retrieve-af6479b8-3210-41df-9d6f-3af31059f274 (0.194024)\n",
+      "    └── BaseSynthesizer.synthesize-bf923672-6e60-4015-b6fe-0d0c9d1d35e3 (1.564853)\n",
+      "        └── CompactAndRefine.get_response-447c173e-4016-4376-9c56-a7171ea1ddf0 (1.564162)\n",
+      "            └── Refine.get_response-83b1159d-d33f-401f-a76e-bc3ee5095b57 (1.557365)\n",
+      "                └── LLM.predict-a5ab2252-1eb1-4413-9ef0-efa14c2c3b6b (1.552019)\n",
+      "\n",
+      "\n",
+      "BaseQueryEngine.aquery-7f3daee7-540f-4189-a350-a37e7e0596d5 (1.79559)\n",
+      "└── RetrieverQueryEngine._aquery-cc049d88-933a-41d3-abb4-06c5bd567e45 (1.793149)\n",
+      "    ├── BaseRetriever.aretrieve-c1a2ae34-3916-4069-81ba-7ba9b9d7235d (0.278098)\n",
+      "    │   └── VectorIndexRetriever._aretrieve-ef38d533-dd12-4fa5-9879-cd885124d8aa (0.276331)\n",
+      "    └── BaseSynthesizer.asynthesize-e3f02693-1563-4eec-898e-1043bbeec870 (1.514635)\n",
+      "        └── CompactAndRefine.aget_response-6cfcc5f8-1a47-4dde-aca7-8bd6543ce457 (1.513896)\n",
+      "            └── Refine.aget_response-7c2a4f67-f4bb-4065-b934-ac8dae7a9529 (1.507486)\n",
+      "                └── LLM.apredict-3ccf1ad0-d0b3-44bd-b64a-83bb652ed4c8 (1.502215)\n",
+      "\n",
+      "\n",
+      "AgentRunner.stream_chat-ae3bc1f0-b9ff-456c-a3b4-7264a5757336 (2.045523)\n",
+      "└── AgentRunner._chat-dd1d6afa-276a-4f7b-8c01-eb39df07b74d (2.045444)\n",
+      "    ├── AgentRunner._run_step-be8844fc-bd2c-4845-b5c5-378e9a1c97b5 (1.715629)\n",
+      "    │   ├── StreamingAgentChatResponse.write_response_to_history-b3fddc19-ee88-442b-9c95-0ea7e68fe4f3 (0.62843)\n",
+      "    │   └── BaseQueryEngine.query-4eb2f0ea-89c0-48d8-a757-92961a4c4275 (1.084424)\n",
+      "    │       └── RetrieverQueryEngine._query-fc5f2290-b715-4d33-82a6-d0eea3ba8625 (1.083421)\n",
+      "    │           ├── BaseRetriever.retrieve-62017ac0-3d6b-4ca4-91e9-d0548d226536 (0.211132)\n",
+      "    │           │   └── VectorIndexRetriever._retrieve-c77b4ae8-702c-42a9-b50a-a394c031c728 (0.209355)\n",
+      "    │           └── BaseSynthesizer.synthesize-7b6caba0-8156-4611-9f8a-baa3a7e5e151 (0.872036)\n",
+      "    │               └── CompactAndRefine.get_response-ec9651ef-4a88-4395-89da-52c2336baca3 (0.871197)\n",
+      "    │                   └── Refine.get_response-57a69243-b676-4f33-8c61-73a5ba6af03c (0.852795)\n",
+      "    │                       └── LLM.predict-b0fe909f-31e5-4655-8428-5dee6f6de7c8 (0.847305)\n",
+      "    └── AgentRunner._run_step-9a126f49-c704-45a8-9104-0e0a9d19c956 (0.327996)\n",
+      "        └── StreamingAgentChatResponse.write_response_to_history-b727aeea-dd0a-4c7a-9212-cf591caf7bb5 (0.609362)\n",
+      "\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "span_handler.print_trace_trees()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "fork-llama-index-core",
+   "language": "python",
+   "name": "fork-llama-index-core"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/examples/instrumentation/observe_api_calls.ipynb b/docs/examples/instrumentation/observe_api_calls.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..10cba42f26d171914ffcbdc727251a25bbc3ee8f
--- /dev/null
+++ b/docs/examples/instrumentation/observe_api_calls.ipynb
@@ -0,0 +1,184 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# API Call Observability \n",
+    "\n",
+    "Using the new `instrumentation` package, we can get direct observability into API calls made using LLMs and embedding models.\n",
+    "\n",
+    "In this notebook, we explore doing this in order to add observability to LLM and embedding calls."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "\n",
+    "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Defining an Event Handler"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from llama_index.core.instrumentation.event_handlers import BaseEventHandler\n",
+    "from llama_index.core.instrumentation.events.llm import (\n",
+    "    LLMCompletionEndEvent,\n",
+    "    LLMChatEndEvent,\n",
+    ")\n",
+    "from llama_index.core.instrumentation.events.embedding import EmbeddingEndEvent\n",
+    "\n",
+    "\n",
+    "class ModelEventHandler(BaseEventHandler):\n",
+    "    @classmethod\n",
+    "    def class_name(cls) -> str:\n",
+    "        \"\"\"Class name.\"\"\"\n",
+    "        return \"ModelEventHandler\"\n",
+    "\n",
+    "    def handle(self, event) -> None:\n",
+    "        \"\"\"Logic for handling event.\"\"\"\n",
+    "        if isinstance(event, LLMCompletionEndEvent):\n",
+    "            print(f\"LLM Prompt length: {len(event.prompt)}\")\n",
+    "            print(f\"LLM Completion: {str(event.response.text)}\")\n",
+    "        elif isinstance(event, LLMChatEndEvent):\n",
+    "            messages_str = \"\\n\".join([str(x) for x in event.messages])\n",
+    "            print(f\"LLM Input Messages length: {len(messages_str)}\")\n",
+    "            print(f\"LLM Response: {str(event.response.message)}\")\n",
+    "        elif isinstance(event, EmbeddingEndEvent):\n",
+    "            print(f\"Embedding {len(event.chunks)} text chunks\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Attaching the Event Handler"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from llama_index.core.instrumentation import get_dispatcher\n",
+    "\n",
+    "# root dispatcher\n",
+    "root_dispatcher = get_dispatcher()\n",
+    "\n",
+    "# register event handler\n",
+    "root_dispatcher.add_event_handler(ModelEventHandler())"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Invoke the Handler!"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Embedding 1 text chunks\n"
+     ]
+    }
+   ],
+   "source": [
+    "from llama_index.core import Document, VectorStoreIndex\n",
+    "\n",
+    "index = VectorStoreIndex.from_documents([Document.example()])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Embedding 1 text chunks\n",
+      "LLM Input Messages length: 1879\n",
+      "LLM Response: assistant: LlamaIndex is a \"data framework\" designed to assist in building LLM apps. It offers tools such as data connectors for various data sources, ways to structure data for easy use with LLMs, an advanced retrieval/query interface, and integrations with different application frameworks. It caters to both beginner and advanced users, providing a high-level API for simple data ingestion and querying, as well as lower-level APIs for customization and extension of modules to suit specific requirements.\n"
+     ]
+    }
+   ],
+   "source": [
+    "query_engine = index.as_query_engine()\n",
+    "response = query_engine.query(\"Tell me about LLMs?\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Embedding 1 text chunks\n",
+      "LLM Input Messages length: 1890\n",
+      "LLM Response: assistant: \n",
+      "LLM Input Messages length: 1890\n",
+      "LLM Response: assistant: Hello\n",
+      "LLM Input Messages length: 1890\n",
+      "LLM Response: assistant: Hello world\n",
+      "LLM Input Messages length: 1890\n",
+      "LLM Response: assistant: Hello world!\n",
+      "LLM Input Messages length: 1890\n",
+      "LLM Response: assistant: Hello world!\n"
+     ]
+    }
+   ],
+   "source": [
+    "query_engine = index.as_query_engine(streaming=True)\n",
+    "response = query_engine.query(\"Repeat only these two words: Hello world!\")\n",
+    "for r in response.response_gen:\n",
+    "    ..."
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "venv",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/docs/index.rst b/docs/index.rst
index ea8fe43e671c57ca4391fea27b9428c38e43ffff..188d950d32d9c895c2230b8147e4387280c4488d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -157,6 +157,7 @@ Associated projects
    module_guides/storing/storing.md
    module_guides/querying/querying.md
    module_guides/deploying/agents/root.md
+   module_guides/observability/instrumentation.md
    module_guides/observability/observability.md
    module_guides/evaluating/root.md
    module_guides/supporting_modules/supporting_modules.md
diff --git a/docs/module_guides/observability/instrumentation.md b/docs/module_guides/observability/instrumentation.md
new file mode 100644
index 0000000000000000000000000000000000000000..8a9b49c2268517e5c818cb3b411f241ed71763d8
--- /dev/null
+++ b/docs/module_guides/observability/instrumentation.md
@@ -0,0 +1,245 @@
+# Instrumentation
+
+**NOTE**: The `instrumentation` module (available in llama-index v0.10.20 and later) is
+meant to replace the legacy `callbacks` module. During the deprecation period,
+the llama-index library supports both modules as a means to instrument your
+LLM application. However, at some point after all of the existing integrations
+have moved over to the new `instrumentation` module, we will no longer support
+the `callbacks` module.
+
+The new `instrumentation` module allows for the instrumentation of `llama-index`
+applications. In particular, one can handle events and track spans using both
+custom logic as well as those offered in the module. Users can also define their
+own events and specify where and when in the code logic that they should be emitted.
+Listed below are the core classes as well as their brief description of the
+`instrumentation` module:
+
+- `Event` — represents a single moment in time that a certain occurrence took place within the execution of the application’s code.
+- `EventHandler` — listen to the occurrences of `Event`'s and execute code logic at these moments in time.
+- `Span` — represents the execution flow of a particular part in the application’s code and thus contains `Event`'s.
+- `SpanHandler` — is responsible for the entering, exiting, and dropping (i.e., early exiting due to error) of `Span`'s.
+- `Dispatcher` — emits `Event`'s as well as signals to enter/exit/drop a `Span` to the appropriate handlers.
+
+## Usage
+
+Using the new `instrumentation` module involves 3 high-level steps.
+
+1. Define a `dispatcher`
+2. Define and attach your `EventHandler`'s to `dispatcher`
+3. Define and attach your `SpanHandler` to `dispatcher`
+
+Doing so would result in the ability to handle events and obtain spans that have
+been transmitted throughout the `llama-index` library and extension packages.
+
+### Defining a custom `EventHandler`
+
+Users can create their own custom handlers by subclassing `BaseEventHandler`
+and providing logic to the abstract method `handle()`.
+
+```python
+from typing import Any
+from llama_index.core.instrumentation.event_handlers.base import BaseEventHandler
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class MyEventHandler(BaseEventHandler):
+    """My custom EventHandler."""
+
+    @classmethod
+    def class_name(cls) -> str:
+        """Class name."""
+        return "MyEventHandler"
+
+    def handle(self, event: BaseEvent, **kwargs) -> Any:
+        """Logic for handling event."""
+        print(event.class_name())
+
+
+my_event_handler = MyEventHandler()
+```
+
+After defining your handler, you can attach it to the desired dispatcher:
+
+```python
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
+dispatcher.add_event_handler(my_event_handler)
+```
+
+### Defining a custom `Event`
+
+Users can create their own custom events by subclassing `BaseEvent`. The
+`BaseEvent` class comes with a `timestamp` as well as an `id_` field. To add more
+items to this event payload, simply add them in as new `Fields` (since they are
+subclasses of `pydantic.BaseModel`).
+
+```python
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class MyEvent(BaseEvent):
+    """My custom Event."""
+
+    new_field_1: Any = Field(...)
+    new_field_2: Any = Field(...)
+```
+
+Once you have your custom event defined, you use a dispatcher to fire the event
+at desired instances throughout your application’s code.
+
+```python
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
+dispatcher.event(MyEvent(new_field_1=..., new_field_2=...))
+```
+
+### Defining a custom `Span`
+
+`Span`’s are like `Event`'s in that they are both structured data classes.
+Unlike `Event`'s though, `Span`'s as their name implies, span a duration of time
+within the program's execution flow. You can define a custom `Span` to store any
+information you would like.
+
+```python
+from typing import Any
+from llama_index.core.bridge.pydantic import Field
+
+
+class MyCustomSpan(BaseSpan):
+    custom_field_1: Any = Field(...)
+    custom_field_2: Any = Field(...)
+```
+
+To handle your new Span type, you need to also define your custom `SpanHandler`
+by subclassing the `BaseSpanHandler` class. Three abstract methods need to be
+defined when subclassing this base class, namely: `new_span()`, `prepare_to_exit_span()`,
+and `prepare_to_drop_span()`.
+
+```python
+from typing import Any, Optional
+from llama_index.core.instrumentation.span.base import BaseSpan
+from llama_index.core.instrumentation.span_handlers import BaseSpanHandler
+
+
+class MyCustomSpanHandler(BaseSpanHandler[MyCustomSpan]):
+    @classmethod
+    def class_name(cls) -> str:
+        """Class name."""
+        return "MyCustomSpanHandler"
+
+    def new_span(
+        self, id: str, parent_span_id: Optional[str], **kwargs
+    ) -> Optional[MyCustomSpan]:
+        """Create a span."""
+        # logic for creating a new MyCustomSpan
+        pass
+
+    def prepare_to_exit_span(
+        self, id: str, result: Optional[Any] = None, **kwargs
+    ) -> Any:
+        """Logic for preparing to exit a span."""
+        pass
+
+    def prepare_to_drop_span(
+        self, id: str, err: Optional[Exception], **kwargs
+    ) -> Any:
+        """Logic for preparing to drop a span."""
+        pass
+```
+
+To make use of your new SpanHandler (and associated Span type), you simply need
+to add it to your desired dispatcher.
+
+```python
+import llama_index.core.instrumentation as instrument
+from llama_index.core.instrumentation.span_handlers import SimpleSpanHandler
+
+dispatcher = (
+    instrument.get_dispatcher()
+)  # with no name argument, defaults to root
+
+my_span_handler = MyCustomSpanHandler()
+dispatcher.add_span_handler(my_span_handler)
+```
+
+### Entering/Exiting a `Span`
+
+To send a signal to `SpanHandler`'s to enter/exit a `Span`, we use the `span_enter()`,
+`span_exit()` methods, respectively. There is also `span_drop()` method that could
+be used to handle cases where `Span`'s are cut shorter than usual due to errors
+within the covered code’s execution.
+
+```python
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
+
+
+def func():
+    dispatcher.span_enter(...)
+    try:
+        val = ...
+    except:
+        ...
+        dispatcher.span_drop(...)
+    else:
+        dispatcher.span_exit(...)
+        return val
+
+
+# or, syntactic sugar via decorators
+
+
+@dispatcher.span
+def func():
+    ...
+```
+
+### Making use of `dispatcher` hierarchy
+
+A similar hierarchy to that seen with the standard Python `logging` library and
+its `Logger` class exists for `dispatcher`. Specifically, all `dispatcher`’s
+except for the root `dispatcher` have a parent, and when handling events or spans
+can propagate them to its parent as well (this is the default behaviour). This
+hierarchical method of handling events and spans allows for defining “global”
+event handlers as well as “local” ones.
+
+Consider the project structure defined below. There are 3 `dispatcher`'s: one at
+the top-level of the `project` and then two others at the individual sub-modules
+`llama1` and `llama2`. With this setup, any `EventHandler`’s attached to the
+project root’s `dispatcher` will be subscribed to all `Event`'s that occur in
+the execution of code in `llama1` and `llama2`. On the other hand, `EventHandler`'s
+defined in the respective `llama<x>` sub modules will only be subscribed to the
+`Event`'s that occur within their respective sub-module execution.
+
+```sh
+project
+├── __init__.py  # has a dispatcher=instrument.get_dispatcher(__name__)
+├── llama1
+│   ├── __init__.py  # has a dispatcher=instrument.get_dispatcher(__name__)
+│   └── app_query_engine.py
+└── llama2
+    ├── __init__.py  # has a dispatcher=instrument.get_dispatcher(__name__)
+    └── app_query_engine.py
+```
+
+Notebook Guides:
+
+```{toctree}
+---
+maxdepth: 1
+---
+/examples/instrumentation/basic_usage.ipynb
+/examples/instrumentation/observe_api_calls.ipynb
+```
+
+## API Reference
+
+```{toctree}
+---
+maxdepth: 1
+---
+/api_reference/instrumentation.rst
+```
diff --git a/docs/module_guides/observability/observability.md b/docs/module_guides/observability/observability.md
index 6a419dee79e186d959e9acb4775c24e9ad95ab97..17d82097a5a596a7aa3c8c82de9b116575d7df57 100644
--- a/docs/module_guides/observability/observability.md
+++ b/docs/module_guides/observability/observability.md
@@ -1,4 +1,9 @@
-# Observability
+# Observability (Legacy)
+
+**NOTE:**
+
+The tooling and integrations mentioned on this page are considered legacy. Observability
+is now being handled via the [`instrumentation` module](./instrumentation.md) (available in v0.10.20 and later.)
 
 LlamaIndex provides **one-click observability** 🔭 to allow you to build principled LLM applications in a production setting.
 
diff --git a/llama-index-core/llama_index/core/agent/runner/base.py b/llama-index-core/llama_index/core/agent/runner/base.py
index 3419b1e37bff8f10f8a41683b14918579e16548a..6cc721e84c7467e2106e1b4870132e0c8d066a4f 100644
--- a/llama-index-core/llama_index/core/agent/runner/base.py
+++ b/llama-index-core/llama_index/core/agent/runner/base.py
@@ -28,6 +28,15 @@ from llama_index.core.llms.llm import LLM
 from llama_index.core.memory import BaseMemory, ChatMemoryBuffer
 from llama_index.core.memory.types import BaseMemory
 from llama_index.core.tools.types import BaseTool
+from llama_index.core.instrumentation.events.agent import (
+    AgentRunStepEndEvent,
+    AgentRunStepStartEvent,
+    AgentChatWithStepStartEvent,
+    AgentChatWithStepEndEvent,
+)
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 
 class BaseAgentRunner(BaseAgent):
@@ -231,7 +240,6 @@ class AgentRunner(BaseAgentRunner):
                 )
             else:
                 self.callback_manager = CallbackManager()
-
         self.init_task_state_kwargs = init_task_state_kwargs or {}
         self.delete_task_on_finish = delete_task_on_finish
         self.default_tool_choice = default_tool_choice
@@ -347,6 +355,7 @@ class AgentRunner(BaseAgentRunner):
         """Get completed steps."""
         return self.state.get_completed_steps(task_id)
 
+    @dispatcher.span
     def _run_step(
         self,
         task_id: str,
@@ -356,6 +365,7 @@ class AgentRunner(BaseAgentRunner):
         **kwargs: Any,
     ) -> TaskStepOutput:
         """Execute step."""
+        dispatcher.event(AgentRunStepStartEvent())
         task = self.state.get_task(task_id)
         step_queue = self.state.get_step_queue(task_id)
         step = step or step_queue.popleft()
@@ -382,6 +392,7 @@ class AgentRunner(BaseAgentRunner):
         completed_steps = self.state.get_completed_steps(task_id)
         completed_steps.append(cur_step_output)
 
+        dispatcher.event(AgentRunStepEndEvent())
         return cur_step_output
 
     async def _arun_step(
@@ -502,6 +513,7 @@ class AgentRunner(BaseAgentRunner):
 
         return cast(AGENT_CHAT_RESPONSE_TYPE, step_output.output)
 
+    @dispatcher.span
     def _chat(
         self,
         message: str,
@@ -515,6 +527,7 @@ class AgentRunner(BaseAgentRunner):
         task = self.create_task(message)
 
         result_output = None
+        dispatcher.event(AgentChatWithStepStartEvent())
         while True:
             # pass step queue in as argument, assume step executor is stateless
             cur_step_output = self._run_step(
@@ -528,10 +541,12 @@ class AgentRunner(BaseAgentRunner):
             # ensure tool_choice does not cause endless loops
             tool_choice = "auto"
 
-        return self.finalize_response(
+        result = self.finalize_response(
             task.task_id,
             result_output,
         )
+        dispatcher.event(AgentChatWithStepEndEvent())
+        return result
 
     async def _achat(
         self,
@@ -579,7 +594,10 @@ class AgentRunner(BaseAgentRunner):
             payload={EventPayload.MESSAGES: [message]},
         ) as e:
             chat_response = self._chat(
-                message, chat_history, tool_choice, mode=ChatResponseMode.WAIT
+                message=message,
+                chat_history=chat_history,
+                tool_choice=tool_choice,
+                mode=ChatResponseMode.WAIT,
             )
             assert isinstance(chat_response, AgentChatResponse)
             e.on_end(payload={EventPayload.RESPONSE: chat_response})
@@ -600,13 +618,16 @@ class AgentRunner(BaseAgentRunner):
             payload={EventPayload.MESSAGES: [message]},
         ) as e:
             chat_response = await self._achat(
-                message, chat_history, tool_choice, mode=ChatResponseMode.WAIT
+                message=message,
+                chat_history=chat_history,
+                tool_choice=tool_choice,
+                mode=ChatResponseMode.WAIT,
             )
             assert isinstance(chat_response, AgentChatResponse)
             e.on_end(payload={EventPayload.RESPONSE: chat_response})
         return chat_response
 
-    @trace_method("chat")
+    @dispatcher.span
     def stream_chat(
         self,
         message: str,
@@ -616,15 +637,13 @@ class AgentRunner(BaseAgentRunner):
         # override tool choice is provided as input.
         if tool_choice is None:
             tool_choice = self.default_tool_choice
-        with self.callback_manager.event(
-            CBEventType.AGENT_STEP,
-            payload={EventPayload.MESSAGES: [message]},
-        ) as e:
-            chat_response = self._chat(
-                message, chat_history, tool_choice, mode=ChatResponseMode.STREAM
-            )
-            assert isinstance(chat_response, StreamingAgentChatResponse)
-            e.on_end(payload={EventPayload.RESPONSE: chat_response})
+        chat_response = self._chat(
+            message=message,
+            chat_history=chat_history,
+            tool_choice=tool_choice,
+            mode=ChatResponseMode.STREAM,
+        )
+        assert isinstance(chat_response, StreamingAgentChatResponse)
         return chat_response
 
     @trace_method("chat")
diff --git a/llama-index-core/llama_index/core/base/base_query_engine.py b/llama-index-core/llama_index/core/base/base_query_engine.py
index ff56c4d7fc81c78c805550cbb9e5a49c13cd4eca..b81fd1739665a615c3f84c461fe4a114f5a8732f 100644
--- a/llama-index-core/llama_index/core/base/base_query_engine.py
+++ b/llama-index-core/llama_index/core/base/base_query_engine.py
@@ -16,14 +16,23 @@ from llama_index.core.bridge.pydantic import Field
 from llama_index.core.callbacks.base import CallbackManager
 from llama_index.core.prompts.mixin import PromptDictType, PromptMixin
 from llama_index.core.schema import NodeWithScore, QueryBundle, QueryType
+from llama_index.core.instrumentation.events.query import (
+    QueryEndEvent,
+    QueryStartEvent,
+)
+import llama_index.core.instrumentation as instrument
 
+dispatcher = instrument.get_dispatcher(__name__)
 logger = logging.getLogger(__name__)
 
 
 class BaseQueryEngine(ChainableMixin, PromptMixin):
     """Base query engine."""
 
-    def __init__(self, callback_manager: Optional[CallbackManager]) -> None:
+    def __init__(
+        self,
+        callback_manager: Optional[CallbackManager],
+    ) -> None:
         self.callback_manager = callback_manager or CallbackManager([])
 
     def _get_prompts(self) -> Dict[str, Any]:
@@ -33,17 +42,25 @@ class BaseQueryEngine(ChainableMixin, PromptMixin):
     def _update_prompts(self, prompts: PromptDictType) -> None:
         """Update prompts."""
 
+    @dispatcher.span
     def query(self, str_or_query_bundle: QueryType) -> RESPONSE_TYPE:
+        dispatcher.event(QueryStartEvent())
         with self.callback_manager.as_trace("query"):
             if isinstance(str_or_query_bundle, str):
                 str_or_query_bundle = QueryBundle(str_or_query_bundle)
-            return self._query(str_or_query_bundle)
+            query_result = self._query(str_or_query_bundle)
+        dispatcher.event(QueryEndEvent())
+        return query_result
 
+    @dispatcher.span
     async def aquery(self, str_or_query_bundle: QueryType) -> RESPONSE_TYPE:
+        dispatcher.event(QueryStartEvent())
         with self.callback_manager.as_trace("query"):
             if isinstance(str_or_query_bundle, str):
                 str_or_query_bundle = QueryBundle(str_or_query_bundle)
-            return await self._aquery(str_or_query_bundle)
+            query_result = await self._aquery(str_or_query_bundle)
+        dispatcher.event(QueryEndEvent())
+        return query_result
 
     def retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
         raise NotImplementedError(
diff --git a/llama-index-core/llama_index/core/base/base_retriever.py b/llama-index-core/llama_index/core/base/base_retriever.py
index f47f7a9abde567e44c23c88cbcc0c806500e3801..51e56416e517f3df67b7ae870c08f7db74e4c299 100644
--- a/llama-index-core/llama_index/core/base/base_retriever.py
+++ b/llama-index-core/llama_index/core/base/base_retriever.py
@@ -1,4 +1,5 @@
 """Base retriever."""
+
 from abc import abstractmethod
 from typing import Any, Dict, List, Optional
 
@@ -29,6 +30,13 @@ from llama_index.core.schema import (
 from llama_index.core.service_context import ServiceContext
 from llama_index.core.settings import Settings
 from llama_index.core.utils import print_text
+from llama_index.core.instrumentation.events.retrieval import (
+    RetrievalEndEvent,
+    RetrievalStartEvent,
+)
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 
 class BaseRetriever(ChainableMixin, PromptMixin):
@@ -207,6 +215,7 @@ class BaseRetriever(ChainableMixin, PromptMixin):
             if not (n.node.hash in seen or seen.add(n.node.hash))  # type: ignore[func-returns-value]
         ]
 
+    @dispatcher.span
     def retrieve(self, str_or_query_bundle: QueryType) -> List[NodeWithScore]:
         """Retrieve nodes given query.
 
@@ -216,7 +225,7 @@ class BaseRetriever(ChainableMixin, PromptMixin):
 
         """
         self._check_callback_manager()
-
+        dispatcher.event(RetrievalStartEvent())
         if isinstance(str_or_query_bundle, str):
             query_bundle = QueryBundle(str_or_query_bundle)
         else:
@@ -231,12 +240,13 @@ class BaseRetriever(ChainableMixin, PromptMixin):
                 retrieve_event.on_end(
                     payload={EventPayload.NODES: nodes},
                 )
-
+        dispatcher.event(RetrievalEndEvent())
         return nodes
 
+    @dispatcher.span
     async def aretrieve(self, str_or_query_bundle: QueryType) -> List[NodeWithScore]:
         self._check_callback_manager()
-
+        dispatcher.event(RetrievalStartEvent())
         if isinstance(str_or_query_bundle, str):
             query_bundle = QueryBundle(str_or_query_bundle)
         else:
@@ -246,12 +256,14 @@ class BaseRetriever(ChainableMixin, PromptMixin):
                 CBEventType.RETRIEVE,
                 payload={EventPayload.QUERY_STR: query_bundle.query_str},
             ) as retrieve_event:
-                nodes = await self._aretrieve(query_bundle)
-                nodes = await self._ahandle_recursive_retrieval(query_bundle, nodes)
+                nodes = await self._aretrieve(query_bundle=query_bundle)
+                nodes = await self._ahandle_recursive_retrieval(
+                    query_bundle=query_bundle, nodes=nodes
+                )
                 retrieve_event.on_end(
                     payload={EventPayload.NODES: nodes},
                 )
-
+        dispatcher.event(RetrievalEndEvent())
         return nodes
 
     @abstractmethod
diff --git a/llama-index-core/llama_index/core/base/embeddings/base.py b/llama-index-core/llama_index/core/base/embeddings/base.py
index 65d9760ad68e72c0973a7db5e725c051a9f89a09..5924aca1a3d2e3a620a91a8cada1aeff456e28df 100644
--- a/llama-index-core/llama_index/core/base/embeddings/base.py
+++ b/llama-index-core/llama_index/core/base/embeddings/base.py
@@ -19,6 +19,15 @@ from llama_index.core.utils import get_tqdm_iterable
 Embedding = List[float]
 
 
+from llama_index.core.instrumentation.events.embedding import (
+    EmbeddingEndEvent,
+    EmbeddingStartEvent,
+)
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
+
+
 class SimilarityMode(str, Enum):
     """Modes for similarity/distance."""
 
@@ -94,6 +103,7 @@ class BaseEmbedding(TransformComponent):
         docstring for more information.
         """
 
+    @dispatcher.span
     def get_query_embedding(self, query: str) -> Embedding:
         """
         Embed the input query.
@@ -104,6 +114,7 @@ class BaseEmbedding(TransformComponent):
         other examples of predefined instructions can be found in
         embeddings/huggingface_utils.py.
         """
+        dispatcher.event(EmbeddingStartEvent(model_dict=self.to_dict()))
         with self.callback_manager.event(
             CBEventType.EMBEDDING, payload={EventPayload.SERIALIZED: self.to_dict()}
         ) as event:
@@ -115,10 +126,15 @@ class BaseEmbedding(TransformComponent):
                     EventPayload.EMBEDDINGS: [query_embedding],
                 },
             )
+        dispatcher.event(
+            EmbeddingEndEvent(chunks=[query], embeddings=[query_embedding])
+        )
         return query_embedding
 
+    @dispatcher.span
     async def aget_query_embedding(self, query: str) -> Embedding:
         """Get query embedding."""
+        dispatcher.event(EmbeddingStartEvent(model_dict=self.to_dict()))
         with self.callback_manager.event(
             CBEventType.EMBEDDING, payload={EventPayload.SERIALIZED: self.to_dict()}
         ) as event:
@@ -130,6 +146,9 @@ class BaseEmbedding(TransformComponent):
                     EventPayload.EMBEDDINGS: [query_embedding],
                 },
             )
+        dispatcher.event(
+            EmbeddingEndEvent(chunks=[query], embeddings=[query_embedding])
+        )
         return query_embedding
 
     def get_agg_embedding_from_queries(
@@ -191,6 +210,7 @@ class BaseEmbedding(TransformComponent):
             *[self._aget_text_embedding(text) for text in texts]
         )
 
+    @dispatcher.span
     def get_text_embedding(self, text: str) -> Embedding:
         """
         Embed the input text.
@@ -200,6 +220,7 @@ class BaseEmbedding(TransformComponent):
         document for retrieval: ". If you're curious, other examples of
         predefined instructions can be found in embeddings/huggingface_utils.py.
         """
+        dispatcher.event(EmbeddingStartEvent(model_dict=self.to_dict()))
         with self.callback_manager.event(
             CBEventType.EMBEDDING, payload={EventPayload.SERIALIZED: self.to_dict()}
         ) as event:
@@ -211,11 +232,13 @@ class BaseEmbedding(TransformComponent):
                     EventPayload.EMBEDDINGS: [text_embedding],
                 }
             )
-
+        dispatcher.event(EmbeddingEndEvent(chunks=[text], embeddings=[text_embedding]))
         return text_embedding
 
+    @dispatcher.span
     async def aget_text_embedding(self, text: str) -> Embedding:
         """Async get text embedding."""
+        dispatcher.event(EmbeddingStartEvent(model_dict=self.to_dict()))
         with self.callback_manager.event(
             CBEventType.EMBEDDING, payload={EventPayload.SERIALIZED: self.to_dict()}
         ) as event:
@@ -227,9 +250,10 @@ class BaseEmbedding(TransformComponent):
                     EventPayload.EMBEDDINGS: [text_embedding],
                 }
             )
-
+        dispatcher.event(EmbeddingEndEvent(chunks=[text], embeddings=[text_embedding]))
         return text_embedding
 
+    @dispatcher.span
     def get_text_embedding_batch(
         self,
         texts: List[str],
@@ -248,6 +272,7 @@ class BaseEmbedding(TransformComponent):
             cur_batch.append(text)
             if idx == len(texts) - 1 or len(cur_batch) == self.embed_batch_size:
                 # flush
+                dispatcher.event(EmbeddingStartEvent(model_dict=self.to_dict()))
                 with self.callback_manager.event(
                     CBEventType.EMBEDDING,
                     payload={EventPayload.SERIALIZED: self.to_dict()},
@@ -260,10 +285,14 @@ class BaseEmbedding(TransformComponent):
                             EventPayload.EMBEDDINGS: embeddings,
                         },
                     )
+                dispatcher.event(
+                    EmbeddingEndEvent(chunks=cur_batch, embeddings=embeddings)
+                )
                 cur_batch = []
 
         return result_embeddings
 
+    @dispatcher.span
     async def aget_text_embedding_batch(
         self, texts: List[str], show_progress: bool = False
     ) -> List[Embedding]:
@@ -276,6 +305,7 @@ class BaseEmbedding(TransformComponent):
             cur_batch.append(text)
             if idx == len(texts) - 1 or len(cur_batch) == self.embed_batch_size:
                 # flush
+                dispatcher.event(EmbeddingStartEvent(model_dict=self.to_dict()))
                 event_id = self.callback_manager.on_event_start(
                     CBEventType.EMBEDDING,
                     payload={EventPayload.SERIALIZED: self.to_dict()},
@@ -307,6 +337,9 @@ class BaseEmbedding(TransformComponent):
         for (event_id, text_batch), embeddings in zip(
             callback_payloads, nested_embeddings
         ):
+            dispatcher.event(
+                EmbeddingEndEvent(chunks=text_batch, embeddings=embeddings)
+            )
             self.callback_manager.on_event_end(
                 CBEventType.EMBEDDING,
                 payload={
diff --git a/llama-index-core/llama_index/core/base/response/schema.py b/llama-index-core/llama_index/core/base/response/schema.py
index abaed38f17a9009887f0a1758ac51cea51614782..f3239338bbab2c0f8d9134c96c83872eff89b1a4 100644
--- a/llama-index-core/llama_index/core/base/response/schema.py
+++ b/llama-index-core/llama_index/core/base/response/schema.py
@@ -1,4 +1,5 @@
 """Response schema."""
+
 import asyncio
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Optional, Union
@@ -154,7 +155,9 @@ class AsyncStreamingResponse:
     source_nodes: List[NodeWithScore] = field(default_factory=list)
     metadata: Optional[Dict[str, Any]] = None
     response_txt: Optional[str] = None
-    _lock: asyncio.Lock = field(default_factory=asyncio.Lock)
+
+    def __post_init__(self) -> None:
+        self._lock = asyncio.Lock()
 
     async def _yield_response(self) -> TokenAsyncGen:
         """Yield the string response."""
diff --git a/llama-index-core/llama_index/core/chat_engine/types.py b/llama-index-core/llama_index/core/chat_engine/types.py
index 2e8fab6b0267a76b06d552978a54f82832ddb056..b92252870d85b44d71166a0f6a2bf84ffeb8d605 100644
--- a/llama-index-core/llama_index/core/chat_engine/types.py
+++ b/llama-index-core/llama_index/core/chat_engine/types.py
@@ -17,6 +17,15 @@ from llama_index.core.base.response.schema import Response, StreamingResponse
 from llama_index.core.memory import BaseMemory
 from llama_index.core.schema import NodeWithScore
 from llama_index.core.tools import ToolOutput
+from llama_index.core.instrumentation.events.chat_engine import (
+    StreamChatErrorEvent,
+    StreamChatEndEvent,
+    StreamChatStartEvent,
+    StreamChatDeltaReceivedEvent,
+)
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.WARNING)
@@ -99,6 +108,7 @@ class StreamingAgentChatResponse:
         self._aqueue.put_nowait(delta)
         self._new_item_event.set()
 
+    @dispatcher.span
     def write_response_to_history(
         self,
         memory: BaseMemory,
@@ -111,11 +121,13 @@ class StreamingAgentChatResponse:
             )
 
         # try/except to prevent hanging on error
+        dispatcher.event(StreamChatStartEvent())
         try:
             final_text = ""
             for chat in self.chat_stream:
                 self._is_function = is_function(chat.message)
                 if chat.delta:
+                    dispatcher.event(StreamChatDeltaReceivedEvent(delta=chat.delta))
                     self.put_in_queue(chat.delta)
                 final_text += chat.delta or ""
             if self._is_function is not None:  # if loop has gone through iteration
@@ -124,12 +136,14 @@ class StreamingAgentChatResponse:
                 chat.message.content = final_text.strip()  # final message
                 memory.put(chat.message)
         except Exception as e:
+            dispatcher.event(StreamChatErrorEvent())
             if not raise_error:
                 logger.warning(
                     f"Encountered exception writing response to history: {e}"
                 )
             else:
                 raise
+        dispatcher.event(StreamChatEndEvent())
 
         self._is_done = True
 
@@ -138,6 +152,7 @@ class StreamingAgentChatResponse:
         if on_stream_end_fn is not None and not self._is_function:
             on_stream_end_fn()
 
+    @dispatcher.span
     async def awrite_response_to_history(
         self,
         memory: BaseMemory,
@@ -150,11 +165,13 @@ class StreamingAgentChatResponse:
             )
 
         # try/except to prevent hanging on error
+        dispatcher.event(StreamChatStartEvent())
         try:
             final_text = ""
             async for chat in self.achat_stream:
                 self._is_function = is_function(chat.message)
                 if chat.delta:
+                    dispatcher.event(StreamChatDeltaReceivedEvent(delta=chat.delta))
                     self.aput_in_queue(chat.delta)
                 final_text += chat.delta or ""
                 self._new_item_event.set()
@@ -166,7 +183,9 @@ class StreamingAgentChatResponse:
                 chat.message.content = final_text.strip()  # final message
                 memory.put(chat.message)
         except Exception as e:
+            dispatcher.event(StreamChatErrorEvent())
             logger.warning(f"Encountered exception writing response to history: {e}")
+        dispatcher.event(StreamChatEndEvent())
         self._is_done = True
 
         # These act as is_done events for any consumers waiting
diff --git a/llama-index-core/llama_index/core/indices/vector_store/base.py b/llama-index-core/llama_index/core/indices/vector_store/base.py
index 596cf50d02d83d325b47767d150a42a86330b2e8..8795ac4b836c4f4ee5d58296774f367f12237a65 100644
--- a/llama-index-core/llama_index/core/indices/vector_store/base.py
+++ b/llama-index-core/llama_index/core/indices/vector_store/base.py
@@ -3,6 +3,7 @@
 An index that is built on top of an existing vector store.
 
 """
+
 import logging
 from typing import Any, Dict, List, Optional, Sequence
 
diff --git a/llama-index-core/llama_index/core/indices/vector_store/retrievers/retriever.py b/llama-index-core/llama_index/core/indices/vector_store/retrievers/retriever.py
index 4722fd1df51b4c83fa5866012cc188a9e4bf684b..526dcc91684be67efbf4e5874cb532b487f7ac2e 100644
--- a/llama-index-core/llama_index/core/indices/vector_store/retrievers/retriever.py
+++ b/llama-index-core/llama_index/core/indices/vector_store/retrievers/retriever.py
@@ -1,6 +1,5 @@
 """Base vector store index query."""
 
-
 from typing import Any, Dict, List, Optional
 
 from llama_index.core.base.base_retriever import BaseRetriever
@@ -17,6 +16,9 @@ from llama_index.core.vector_stores.types import (
     VectorStoreQueryMode,
     VectorStoreQueryResult,
 )
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 
 class VectorIndexRetriever(BaseRetriever):
@@ -66,8 +68,12 @@ class VectorIndexRetriever(BaseRetriever):
         self._filters = filters
         self._sparse_top_k = sparse_top_k
         self._kwargs: Dict[str, Any] = kwargs.get("vector_store_kwargs", {})
+
+        callback_manager = callback_manager or CallbackManager()
         super().__init__(
-            callback_manager=callback_manager, object_map=object_map, verbose=verbose
+            callback_manager=callback_manager,
+            object_map=object_map,
+            verbose=verbose,
         )
 
     @property
@@ -80,6 +86,7 @@ class VectorIndexRetriever(BaseRetriever):
         """Set similarity top k."""
         self._similarity_top_k = similarity_top_k
 
+    @dispatcher.span
     def _retrieve(
         self,
         query_bundle: QueryBundle,
@@ -93,6 +100,7 @@ class VectorIndexRetriever(BaseRetriever):
                 )
         return self._get_nodes_with_embeddings(query_bundle)
 
+    @dispatcher.span
     async def _aretrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
         if self._vector_store.is_embedding_query:
             if query_bundle.embedding is None and len(query_bundle.embedding_strs) > 0:
@@ -146,11 +154,9 @@ class VectorIndexRetriever(BaseRetriever):
                 ):
                     node_id = query_result.nodes[i].node_id
                     if self._docstore.document_exists(node_id):
-                        query_result.nodes[
-                            i
-                        ] = self._docstore.get_node(  # type: ignore[index]
+                        query_result.nodes[i] = self._docstore.get_node(  # type: ignore[index]
                             node_id
-                        )
+                        )
 
         log_vector_store_query_result(query_result)
 
diff --git a/llama-index-core/llama_index/core/instrumentation/BUILD b/llama-index-core/llama_index/core/instrumentation/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..db46e8d6c978c67e301dd6c47bee08c1b3fd141c
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/BUILD
@@ -0,0 +1 @@
+python_sources()
diff --git a/llama-index-core/llama_index/core/instrumentation/__init__.py b/llama-index-core/llama_index/core/instrumentation/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a060bf2f095cbe089d5b852bbdc6887ee69c0268
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/__init__.py
@@ -0,0 +1,30 @@
+from llama_index.core.instrumentation.dispatcher import Dispatcher, Manager
+from llama_index.core.instrumentation.event_handlers import NullEventHandler
+from llama_index.core.instrumentation.span_handlers import NullSpanHandler
+
+root_dispatcher: Dispatcher = Dispatcher(
+    name="root",
+    event_handlers=[NullEventHandler()],
+    span_handler=NullSpanHandler(),
+    propagate=False,
+)
+
+root_manager: Manager = Manager(root_dispatcher)
+
+
+def get_dispatcher(name: str = "root") -> Dispatcher:
+    """Module method that should be used for creating a new Dispatcher."""
+    if name in root_manager.dispatchers:
+        return root_manager.dispatchers[name]
+
+    candidate_parent_name = ".".join(name.split(".")[:-1])
+    if candidate_parent_name in root_manager.dispatchers:
+        parent_name = candidate_parent_name
+    else:
+        parent_name = "root"
+
+    new_dispatcher = Dispatcher(
+        name=name, root=root_dispatcher, parent_name=parent_name, manager=root_manager
+    )
+    root_manager.add_dispatcher(new_dispatcher)
+    return new_dispatcher
diff --git a/llama-index-core/llama_index/core/instrumentation/dispatcher.py b/llama-index-core/llama_index/core/instrumentation/dispatcher.py
new file mode 100644
index 0000000000000000000000000000000000000000..c04b921f55b47ffb202fc42f86e8446a61518bc1
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/dispatcher.py
@@ -0,0 +1,147 @@
+from typing import Any, List, Optional, Dict
+import functools
+import inspect
+import uuid
+from llama_index.core.bridge.pydantic import BaseModel, Field
+from llama_index.core.instrumentation.event_handlers import BaseEventHandler
+from llama_index.core.instrumentation.span_handlers import (
+    BaseSpanHandler,
+    NullSpanHandler,
+)
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class Dispatcher(BaseModel):
+    name: str = Field(default_factory=str, description="Name of dispatcher")
+    event_handlers: List[BaseEventHandler] = Field(
+        default=[], description="List of attached handlers"
+    )
+    span_handlers: List[BaseSpanHandler] = Field(
+        default=[NullSpanHandler()], description="Span handler."
+    )
+    parent_name: str = Field(
+        default_factory=str, description="Name of parent Dispatcher."
+    )
+    manager: Optional["Manager"] = Field(
+        default=None, description="Dispatcher manager."
+    )
+    root_name: str = Field(default="root", description="Root of the Dispatcher tree.")
+    propagate: bool = Field(
+        default=True,
+        description="Whether to propagate the event to parent dispatchers and their handlers",
+    )
+
+    @property
+    def parent(self) -> "Dispatcher":
+        return self.manager.dispatchers[self.parent_name]
+
+    @property
+    def root(self) -> "Dispatcher":
+        return self.manager.dispatchers[self.root_name]
+
+    def add_event_handler(self, handler: BaseEventHandler) -> None:
+        """Add handler to set of handlers."""
+        self.event_handlers += [handler]
+
+    def add_span_handler(self, handler: BaseSpanHandler) -> None:
+        """Add handler to set of handlers."""
+        self.span_handlers += [handler]
+
+    def event(self, event: BaseEvent, **kwargs) -> None:
+        """Dispatch event to all registered handlers."""
+        c = self
+        while c:
+            for h in c.event_handlers:
+                h.handle(event, **kwargs)
+            if not c.propagate:
+                c = None
+            else:
+                c = c.parent
+
+    def span_enter(self, id: str, **kwargs) -> None:
+        """Send notice to handlers that a span with id has started."""
+        c = self
+        while c:
+            for h in c.span_handlers:
+                h.span_enter(id, **kwargs)
+            if not c.propagate:
+                c = None
+            else:
+                c = c.parent
+
+    def span_drop(self, id: str, err: Optional[Exception], **kwargs) -> None:
+        """Send notice to handlers that a span with id is being dropped."""
+        c = self
+        while c:
+            for h in c.span_handlers:
+                h.span_drop(id, err, **kwargs)
+            if not c.propagate:
+                c = None
+            else:
+                c = c.parent
+
+    def span_exit(self, id: str, result: Optional[Any] = None, **kwargs) -> None:
+        """Send notice to handlers that a span with id is exiting."""
+        c = self
+        while c:
+            for h in c.span_handlers:
+                h.span_exit(id, result, **kwargs)
+            if not c.propagate:
+                c = None
+            else:
+                c = c.parent
+
+    def span(self, func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            id = f"{func.__qualname__}-{uuid.uuid4()}"
+            self.span_enter(id=id, **kwargs)
+            try:
+                result = func(*args, **kwargs)
+            except Exception as e:
+                self.span_drop(id=id, err=e)
+                raise
+            self.span_exit(id=id, result=result)
+            return result
+
+        @functools.wraps(func)
+        async def async_wrapper(*args, **kwargs):
+            id = f"{func.__qualname__}-{uuid.uuid4()}"
+            self.span_enter(id=id, **kwargs)
+            try:
+                result = await func(*args, **kwargs)
+            except Exception as e:
+                self.span_drop(id=id, err=e)
+                raise
+            self.span_exit(id=id, result=result)
+            return result
+
+        if inspect.iscoroutinefunction(func):
+            return async_wrapper
+        else:
+            return wrapper
+
+    @property
+    def log_name(self) -> str:
+        """Name to be used in logging."""
+        if self.manager and self.parent_name:
+            return f"{self.parent.name}.{self.name}"
+        else:
+            return self.name
+
+    class Config:
+        arbitrary_types_allowed = True
+
+
+class Manager:
+    def __init__(self, root: Dispatcher) -> None:
+        self.dispatchers: Dict[str, Dispatcher] = {root.name: root}
+
+    def add_dispatcher(self, d: Dispatcher) -> None:
+        if d.name in self.dispatchers:
+            pass
+        else:
+            self.dispatchers[d.name] = d
+
+
+Dispatcher.update_forward_refs()
diff --git a/llama-index-core/llama_index/core/instrumentation/event_handlers/BUILD b/llama-index-core/llama_index/core/instrumentation/event_handlers/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..db46e8d6c978c67e301dd6c47bee08c1b3fd141c
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/event_handlers/BUILD
@@ -0,0 +1 @@
+python_sources()
diff --git a/llama-index-core/llama_index/core/instrumentation/event_handlers/__init__.py b/llama-index-core/llama_index/core/instrumentation/event_handlers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..aac501e2ee5d5e66af4e7e8781499592b6c926c6
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/event_handlers/__init__.py
@@ -0,0 +1,5 @@
+from llama_index.core.instrumentation.event_handlers.base import BaseEventHandler
+from llama_index.core.instrumentation.event_handlers.null import NullEventHandler
+
+
+__all__ = ["BaseEventHandler", "NullEventHandler"]
diff --git a/llama-index-core/llama_index/core/instrumentation/event_handlers/base.py b/llama-index-core/llama_index/core/instrumentation/event_handlers/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f33f5b3dc8413e813519c06c41a80a18dfc432b
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/event_handlers/base.py
@@ -0,0 +1,20 @@
+from typing import Any
+from abc import abstractmethod
+from llama_index.core.bridge.pydantic import BaseModel
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class BaseEventHandler(BaseModel):
+    """Base callback handler that can be used to track event starts and ends."""
+
+    @classmethod
+    def class_name(cls) -> str:
+        """Class name."""
+        return "BaseEventHandler"
+
+    @abstractmethod
+    def handle(self, event: BaseEvent, **kwargs) -> Any:
+        """Logic for handling event."""
+
+    class Config:
+        arbitrary_types_allowed = True
diff --git a/llama-index-core/llama_index/core/instrumentation/event_handlers/null.py b/llama-index-core/llama_index/core/instrumentation/event_handlers/null.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd1025c54637e2d51d220480aacdb96aceeb60c8
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/event_handlers/null.py
@@ -0,0 +1,14 @@
+from typing import Any
+from llama_index.core.instrumentation.event_handlers.base import BaseEventHandler
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class NullEventHandler(BaseEventHandler):
+    @classmethod
+    def class_name(cls) -> str:
+        """Class name."""
+        return "NullEventHandler"
+
+    def handle(self, event: BaseEvent, **kwargs) -> Any:
+        """Handle logic - null handler does nothing."""
+        return
diff --git a/llama-index-core/llama_index/core/instrumentation/events/BUILD b/llama-index-core/llama_index/core/instrumentation/events/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..db46e8d6c978c67e301dd6c47bee08c1b3fd141c
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/BUILD
@@ -0,0 +1 @@
+python_sources()
diff --git a/llama-index-core/llama_index/core/instrumentation/events/__init__.py b/llama-index-core/llama_index/core/instrumentation/events/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b06289a4fe024e7870a3e411443526640ff2267
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/__init__.py
@@ -0,0 +1,6 @@
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+__all__ = [
+    "BaseEvent",
+]
diff --git a/llama-index-core/llama_index/core/instrumentation/events/agent.py b/llama-index-core/llama_index/core/instrumentation/events/agent.py
new file mode 100644
index 0000000000000000000000000000000000000000..d342d526570a2bc6ba767690760b986d744ec954
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/agent.py
@@ -0,0 +1,40 @@
+from llama_index.core.instrumentation.events.base import BaseEvent
+from llama_index.core.tools.types import ToolMetadata
+
+
+class AgentRunStepStartEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "AgentRunStepStartEvent"
+
+
+class AgentRunStepEndEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "AgentRunStepEndEvent"
+
+
+class AgentChatWithStepStartEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "AgentChatWithStepStartEvent"
+
+
+class AgentChatWithStepEndEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "AgentChatWithStepEndEvent"
+
+
+class AgentToolCallEvent(BaseEvent):
+    arguments: str
+    tool: ToolMetadata
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "AgentToolCallEvent"
diff --git a/llama-index-core/llama_index/core/instrumentation/events/base.py b/llama-index-core/llama_index/core/instrumentation/events/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c7636004fe8575adb18225a20221000fda9348b
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/base.py
@@ -0,0 +1,22 @@
+from typing import Any, Dict
+from llama_index.core.bridge.pydantic import BaseModel, Field
+from uuid import uuid4
+from datetime import datetime
+
+
+class BaseEvent(BaseModel):
+    timestamp: datetime = Field(default_factory=lambda: datetime.now())
+    id_: str = Field(default_factory=lambda: str(uuid4()))
+
+    @classmethod
+    def class_name(cls):
+        """Return class name."""
+        return "BaseEvent"
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    def dict(self, **kwargs: Any) -> Dict[str, Any]:
+        data = super().dict(**kwargs)
+        data["class_name"] = self.class_name()
+        return data
diff --git a/llama-index-core/llama_index/core/instrumentation/events/chat_engine.py b/llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb9e9091bbc32d00c01bf442a8e7052025beb9f6
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/chat_engine.py
@@ -0,0 +1,31 @@
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class StreamChatStartEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "StreamChatStartEvent"
+
+
+class StreamChatEndEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "StreamChatEndEvent"
+
+
+class StreamChatErrorEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "StreamChatErrorEvent"
+
+
+class StreamChatDeltaReceivedEvent(BaseEvent):
+    delta: str
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "StreamChatDeltaReceivedEvent"
diff --git a/llama-index-core/llama_index/core/instrumentation/events/embedding.py b/llama-index-core/llama_index/core/instrumentation/events/embedding.py
new file mode 100644
index 0000000000000000000000000000000000000000..087614e7f4fec1d92d26fceb7c23aadc14f68689
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/embedding.py
@@ -0,0 +1,22 @@
+from typing import List
+
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class EmbeddingStartEvent(BaseEvent):
+    model_dict: dict
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "EmbeddingStartEvent"
+
+
+class EmbeddingEndEvent(BaseEvent):
+    chunks: List[str]
+    embeddings: List[List[float]]
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "EmbeddingEndEvent"
diff --git a/llama-index-core/llama_index/core/instrumentation/events/llm.py b/llama-index-core/llama_index/core/instrumentation/events/llm.py
new file mode 100644
index 0000000000000000000000000000000000000000..b61dde1b84d2232af50e8cf94548c010024585a9
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/llm.py
@@ -0,0 +1,64 @@
+from typing import List
+
+from llama_index.core.base.llms.types import (
+    ChatMessage,
+    ChatResponse,
+    CompletionResponse,
+)
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class LLMPredictStartEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "LLMPredictStartEvent"
+
+
+class LLMPredictEndEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "LLMPredictEndEvent"
+
+
+class LLMCompletionStartEvent(BaseEvent):
+    prompt: str
+    additional_kwargs: dict
+    model_dict: dict
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "LLMCompletionStartEvent"
+
+
+class LLMCompletionEndEvent(BaseEvent):
+    prompt: str
+    response: CompletionResponse
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "LLMCompletionEndEvent"
+
+
+class LLMChatStartEvent(BaseEvent):
+    messages: List[ChatMessage]
+    additional_kwargs: dict
+    model_dict: dict
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "LLMChatStartEvent"
+
+
+class LLMChatEndEvent(BaseEvent):
+    messages: List[ChatMessage]
+    response: ChatResponse
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "LLMChatEndEvent"
diff --git a/llama-index-core/llama_index/core/instrumentation/events/query.py b/llama-index-core/llama_index/core/instrumentation/events/query.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d2b3edf10699ba3ac37a7d881a5ab7d910b882a
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/query.py
@@ -0,0 +1,15 @@
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class QueryStartEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "QueryStartEvent"
+
+
+class QueryEndEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "QueryEndEvent"
diff --git a/llama-index-core/llama_index/core/instrumentation/events/retrieval.py b/llama-index-core/llama_index/core/instrumentation/events/retrieval.py
new file mode 100644
index 0000000000000000000000000000000000000000..c62b80e7cc003f5d7829e6990c63ecedace65665
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/retrieval.py
@@ -0,0 +1,15 @@
+from llama_index.core.instrumentation.events.base import BaseEvent
+
+
+class RetrievalStartEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "RetrievalStartEvent"
+
+
+class RetrievalEndEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "RetrievalEndEvent"
diff --git a/llama-index-core/llama_index/core/instrumentation/events/synthesis.py b/llama-index-core/llama_index/core/instrumentation/events/synthesis.py
new file mode 100644
index 0000000000000000000000000000000000000000..71251eaec63602d7fb68437261502beaae4139c5
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/events/synthesis.py
@@ -0,0 +1,36 @@
+from llama_index.core.base.response.schema import RESPONSE_TYPE
+from llama_index.core.instrumentation.events.base import BaseEvent
+from llama_index.core.schema import QueryType
+
+
+class SynthesizeStartEvent(BaseEvent):
+    query: QueryType
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "SynthesizeStartEvent"
+
+
+class SynthesizeEndEvent(BaseEvent):
+    query: QueryType
+    response: RESPONSE_TYPE
+
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "SynthesizeEndEvent"
+
+
+class GetResponseStartEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "GetResponseStartEvent"
+
+
+class GetResponseEndEvent(BaseEvent):
+    @classmethod
+    def class_name(cls):
+        """Class name."""
+        return "GetResponseEndEvent"
diff --git a/llama-index-core/llama_index/core/instrumentation/span/BUILD b/llama-index-core/llama_index/core/instrumentation/span/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..db46e8d6c978c67e301dd6c47bee08c1b3fd141c
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/span/BUILD
@@ -0,0 +1 @@
+python_sources()
diff --git a/llama-index-core/llama_index/core/instrumentation/span/__init__.py b/llama-index-core/llama_index/core/instrumentation/span/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d07d99482a34aff6f04edeba0d732af4d267bb39
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/span/__init__.py
@@ -0,0 +1,5 @@
+from llama_index.core.instrumentation.span.base import BaseSpan
+from llama_index.core.instrumentation.span.simple import SimpleSpan
+
+
+__all__ = ["BaseSpan", "SimpleSpan"]
diff --git a/llama-index-core/llama_index/core/instrumentation/span/base.py b/llama-index-core/llama_index/core/instrumentation/span/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a68f3071fdcfae45f57cf7bde2369070559f351
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/span/base.py
@@ -0,0 +1,12 @@
+from typing import Optional
+from llama_index.core.bridge.pydantic import BaseModel, Field
+
+
+class BaseSpan(BaseModel):
+    """Base data class representing a span."""
+
+    id_: str = Field(default_factory=str, description="Id of span.")
+    parent_id: Optional[str] = Field(default=None, description="Id of parent span.")
+
+    class Config:
+        arbitrary_types_allowed = True
diff --git a/llama-index-core/llama_index/core/instrumentation/span/simple.py b/llama-index-core/llama_index/core/instrumentation/span/simple.py
new file mode 100644
index 0000000000000000000000000000000000000000..ceef9338e11c976cdb744aa9a4810a77c4b19f54
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/span/simple.py
@@ -0,0 +1,12 @@
+from typing import Optional
+from llama_index.core.bridge.pydantic import Field
+from llama_index.core.instrumentation.span.base import BaseSpan
+from datetime import datetime
+
+
+class SimpleSpan(BaseSpan):
+    """Simple span class."""
+
+    start_time: datetime = Field(default_factory=lambda: datetime.now())
+    end_time: Optional[datetime] = Field(default=None)
+    duration: float = Field(default=0.0, description="Duration of span in seconds.")
diff --git a/llama-index-core/llama_index/core/instrumentation/span_handlers/BUILD b/llama-index-core/llama_index/core/instrumentation/span_handlers/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..db46e8d6c978c67e301dd6c47bee08c1b3fd141c
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/span_handlers/BUILD
@@ -0,0 +1 @@
+python_sources()
diff --git a/llama-index-core/llama_index/core/instrumentation/span_handlers/__init__.py b/llama-index-core/llama_index/core/instrumentation/span_handlers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e8b1cbd3a7ffe59f8e09097e2486841a285a394
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/span_handlers/__init__.py
@@ -0,0 +1,10 @@
+from llama_index.core.instrumentation.span_handlers.base import BaseSpanHandler
+from llama_index.core.instrumentation.span_handlers.null import NullSpanHandler
+from llama_index.core.instrumentation.span_handlers.simple import SimpleSpanHandler
+
+
+__all__ = [
+    "BaseSpanHandler",
+    "NullSpanHandler",
+    "SimpleSpanHandler",
+]
diff --git a/llama-index-core/llama_index/core/instrumentation/span_handlers/base.py b/llama-index-core/llama_index/core/instrumentation/span_handlers/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..396ef45af490a44e4bf219ba6a706e481c8bb358
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/span_handlers/base.py
@@ -0,0 +1,66 @@
+from abc import abstractmethod
+from typing import Any, Dict, Generic, Optional, TypeVar
+
+from llama_index.core.bridge.pydantic import BaseModel, Field
+from llama_index.core.instrumentation.span.base import BaseSpan
+
+T = TypeVar("T", bound=BaseSpan)
+
+
+class BaseSpanHandler(BaseModel, Generic[T]):
+    open_spans: Dict[str, T] = Field(
+        default_factory=dict, description="Dictionary of open spans."
+    )
+    current_span_id: Optional[str] = Field(
+        default=None, description="Id of current span."
+    )
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    @classmethod
+    def class_name(cls) -> str:
+        """Class name."""
+        return "BaseSpanHandler"
+
+    def span_enter(self, id: str, **kwargs) -> None:
+        """Logic for entering a span."""
+        if id in self.open_spans:
+            pass  # should probably raise an error here
+        else:
+            # TODO: thread safe?
+            span = self.new_span(id=id, parent_span_id=self.current_span_id, **kwargs)
+            if span:
+                self.open_spans[id] = span
+                self.current_span_id = id
+
+    def span_exit(self, id: str, result: Optional[Any] = None, **kwargs) -> None:
+        """Logic for exiting a span."""
+        self.prepare_to_exit_span(id, result=result, **kwargs)
+        if self.current_span_id == id:
+            self.current_span_id = self.open_spans[id].parent_id
+        del self.open_spans[id]
+
+    def span_drop(self, id: str, err: Optional[Exception], **kwargs) -> None:
+        """Logic for dropping a span i.e. early exit."""
+        self.prepare_to_drop_span(id, err, **kwargs)
+        if self.current_span_id == id:
+            self.current_span_id = self.open_spans[id].parent_id
+        del self.open_spans[id]
+
+    @abstractmethod
+    def new_span(self, id: str, parent_span_id: Optional[str], **kwargs) -> Optional[T]:
+        """Create a span."""
+        ...
+
+    @abstractmethod
+    def prepare_to_exit_span(
+        self, id: str, result: Optional[Any] = None, **kwargs
+    ) -> Any:
+        """Logic for preparing to exit a span."""
+        ...
+
+    @abstractmethod
+    def prepare_to_drop_span(self, id: str, err: Optional[Exception], **kwargs) -> Any:
+        """Logic for preparing to drop a span."""
+        ...
diff --git a/llama-index-core/llama_index/core/instrumentation/span_handlers/null.py b/llama-index-core/llama_index/core/instrumentation/span_handlers/null.py
new file mode 100644
index 0000000000000000000000000000000000000000..47456b051cd80cd579193e2ea814e385a7e4678b
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/span_handlers/null.py
@@ -0,0 +1,33 @@
+from typing import Optional, Any
+from llama_index.core.instrumentation.span_handlers.base import BaseSpanHandler
+from llama_index.core.instrumentation.span.base import BaseSpan
+
+
+class NullSpanHandler(BaseSpanHandler[BaseSpan]):
+    @classmethod
+    def class_name(cls) -> str:
+        """Class name."""
+        return "NullSpanHandler"
+
+    def span_enter(self, id: str, **kwargs) -> None:
+        """Logic for entering a span."""
+        return
+
+    def span_exit(self, id: str, result: Optional[Any] = None, **kwargs) -> None:
+        """Logic for exiting a span."""
+        return
+
+    def new_span(self, id: str, parent_span_id: Optional[str], **kwargs) -> None:
+        """Create a span."""
+        return
+
+    def prepare_to_exit_span(
+        self, id: str, result: Optional[Any] = None, **kwargs
+    ) -> None:
+        """Logic for exiting a span."""
+        return
+
+    def prepare_to_drop_span(self, id: str, err: Optional[Exception], **kwargs) -> None:
+        """Logic for dropping a span."""
+        if err:
+            raise err
diff --git a/llama-index-core/llama_index/core/instrumentation/span_handlers/simple.py b/llama-index-core/llama_index/core/instrumentation/span_handlers/simple.py
new file mode 100644
index 0000000000000000000000000000000000000000..4cdee9d67d88f86bca92cfcd7e979e4fc4816e3c
--- /dev/null
+++ b/llama-index-core/llama_index/core/instrumentation/span_handlers/simple.py
@@ -0,0 +1,77 @@
+from typing import Any, cast, List, Optional, TYPE_CHECKING
+from llama_index.core.bridge.pydantic import Field
+from llama_index.core.instrumentation.span.simple import SimpleSpan
+from llama_index.core.instrumentation.span_handlers.base import BaseSpanHandler
+from datetime import datetime
+
+if TYPE_CHECKING:
+    from treelib import Tree
+
+
+class SimpleSpanHandler(BaseSpanHandler[SimpleSpan]):
+    """Span Handler that manages SimpleSpan's."""
+
+    completed_spans: List[SimpleSpan] = Field(
+        default_factory=list, description="List of completed spans."
+    )
+
+    @classmethod
+    def class_name(cls) -> str:
+        """Class name."""
+        return "SimpleSpanHandler"
+
+    def new_span(self, id: str, parent_span_id: Optional[str], **kwargs) -> SimpleSpan:
+        """Create a span."""
+        return SimpleSpan(id_=id, parent_id=parent_span_id)
+
+    def prepare_to_exit_span(
+        self, id: str, result: Optional[Any] = None, **kwargs
+    ) -> None:
+        """Logic for preparing to exit a span."""
+        span = self.open_spans[id]
+        span = cast(SimpleSpan, span)
+        span.end_time = datetime.now()
+        span.duration = (span.end_time - span.start_time).total_seconds()
+        self.completed_spans += [span]
+
+    def prepare_to_drop_span(self, id: str, err: Optional[Exception], **kwargs) -> None:
+        """Logic for dropping a span."""
+        if err:
+            raise err
+
+    def _get_trace_trees(self) -> List["Tree"]:
+        """Method for getting trace trees."""
+        try:
+            from treelib import Tree
+        except ImportError as e:
+            raise ImportError(
+                "`treelib` package is missing. Please install it by using "
+                "`pip install treelib`."
+            ) from e
+        sorted_spans = sorted(self.completed_spans, key=lambda x: x.start_time)
+
+        trees = []
+        tree = Tree()
+        for span in sorted_spans:
+            if span.parent_id is None:
+                # complete old tree unless its empty (i.e., start of loop)
+                if tree.all_nodes():
+                    trees.append(tree)
+                    # start new tree
+                    tree = Tree()
+
+            tree.create_node(
+                tag=f"{span.id_} ({span.duration})",
+                identifier=span.id_,
+                parent=span.parent_id,
+                data=span.start_time,
+            )
+        trees.append(tree)
+        return trees
+
+    def print_trace_trees(self) -> None:
+        """Method for viewing trace trees."""
+        trees = self._get_trace_trees()
+        for tree in trees:
+            print(tree.show(stdout=False, sorting=True, key=lambda node: node.data))
+            print("")
diff --git a/llama-index-core/llama_index/core/llms/callbacks.py b/llama-index-core/llama_index/core/llms/callbacks.py
index 3050c0a7256530046cba87020c4d37a5b111ffe3..1d25ce4dc9d576fe638bcf0108b969272bbe4bd1 100644
--- a/llama-index-core/llama_index/core/llms/callbacks.py
+++ b/llama-index-core/llama_index/core/llms/callbacks.py
@@ -20,6 +20,17 @@ from llama_index.core.base.llms.types import (
 )
 from llama_index.core.callbacks import CallbackManager, CBEventType, EventPayload
 
+# dispatcher setup
+from llama_index.core.instrumentation import get_dispatcher
+from llama_index.core.instrumentation.events.llm import (
+    LLMCompletionEndEvent,
+    LLMCompletionStartEvent,
+    LLMChatEndEvent,
+    LLMChatStartEvent,
+)
+
+dispatcher = get_dispatcher(__name__)
+
 
 def llm_chat_callback() -> Callable:
     def wrap(f: Callable) -> Callable:
@@ -38,6 +49,13 @@ def llm_chat_callback() -> Callable:
             _self: Any, messages: Sequence[ChatMessage], **kwargs: Any
         ) -> Any:
             with wrapper_logic(_self) as callback_manager:
+                dispatcher.event(
+                    LLMChatStartEvent(
+                        model_dict=_self.to_dict(),
+                        messages=messages,
+                        additional_kwargs=kwargs,
+                    )
+                )
                 event_id = callback_manager.on_event_start(
                     CBEventType.LLM,
                     payload={
@@ -53,6 +71,12 @@ def llm_chat_callback() -> Callable:
                     async def wrapped_gen() -> ChatResponseAsyncGen:
                         last_response = None
                         async for x in f_return_val:
+                            dispatcher.event(
+                                LLMChatEndEvent(
+                                    messages=messages,
+                                    response=x,
+                                )
+                            )
                             yield cast(ChatResponse, x)
                             last_response = x
 
@@ -75,6 +99,12 @@ def llm_chat_callback() -> Callable:
                         },
                         event_id=event_id,
                     )
+                    dispatcher.event(
+                        LLMChatEndEvent(
+                            messages=messages,
+                            response=f_return_val,
+                        )
+                    )
 
             return f_return_val
 
@@ -82,6 +112,13 @@ def llm_chat_callback() -> Callable:
             _self: Any, messages: Sequence[ChatMessage], **kwargs: Any
         ) -> Any:
             with wrapper_logic(_self) as callback_manager:
+                dispatcher.event(
+                    LLMChatStartEvent(
+                        model_dict=_self.to_dict(),
+                        messages=messages,
+                        additional_kwargs=kwargs,
+                    )
+                )
                 event_id = callback_manager.on_event_start(
                     CBEventType.LLM,
                     payload={
@@ -97,6 +134,12 @@ def llm_chat_callback() -> Callable:
                     def wrapped_gen() -> ChatResponseGen:
                         last_response = None
                         for x in f_return_val:
+                            dispatcher.event(
+                                LLMChatEndEvent(
+                                    messages=messages,
+                                    response=x,
+                                )
+                            )
                             yield cast(ChatResponse, x)
                             last_response = x
 
@@ -119,6 +162,12 @@ def llm_chat_callback() -> Callable:
                         },
                         event_id=event_id,
                     )
+                    dispatcher.event(
+                        LLMChatEndEvent(
+                            messages=messages,
+                            response=f_return_val,
+                        )
+                    )
 
             return f_return_val
 
@@ -164,6 +213,13 @@ def llm_completion_callback() -> Callable:
             _self: Any, *args: Any, **kwargs: Any
         ) -> Any:
             with wrapper_logic(_self) as callback_manager:
+                dispatcher.event(
+                    LLMCompletionStartEvent(
+                        model_dict=_self.to_dict(),
+                        prompt=args[0],
+                        additional_kwargs=kwargs,
+                    )
+                )
                 event_id = callback_manager.on_event_start(
                     CBEventType.LLM,
                     payload={
@@ -180,6 +236,12 @@ def llm_completion_callback() -> Callable:
                     async def wrapped_gen() -> CompletionResponseAsyncGen:
                         last_response = None
                         async for x in f_return_val:
+                            dispatcher.event(
+                                LLMCompletionEndEvent(
+                                    prompt=args[0],
+                                    response=x,
+                                )
+                            )
                             yield cast(CompletionResponse, x)
                             last_response = x
 
@@ -202,11 +264,24 @@ def llm_completion_callback() -> Callable:
                         },
                         event_id=event_id,
                     )
+                    dispatcher.event(
+                        LLMCompletionEndEvent(
+                            prompt=args[0],
+                            response=f_return_val,
+                        )
+                    )
 
             return f_return_val
 
         def wrapped_llm_predict(_self: Any, *args: Any, **kwargs: Any) -> Any:
             with wrapper_logic(_self) as callback_manager:
+                dispatcher.event(
+                    LLMCompletionStartEvent(
+                        model_dict=_self.to_dict(),
+                        prompt=args[0],
+                        additional_kwargs=kwargs,
+                    )
+                )
                 event_id = callback_manager.on_event_start(
                     CBEventType.LLM,
                     payload={
@@ -222,6 +297,12 @@ def llm_completion_callback() -> Callable:
                     def wrapped_gen() -> CompletionResponseGen:
                         last_response = None
                         for x in f_return_val:
+                            dispatcher.event(
+                                LLMCompletionEndEvent(
+                                    prompt=args[0],
+                                    response=x,
+                                )
+                            )
                             yield cast(CompletionResponse, x)
                             last_response = x
 
@@ -244,6 +325,12 @@ def llm_completion_callback() -> Callable:
                         },
                         event_id=event_id,
                     )
+                    dispatcher.event(
+                        LLMCompletionEndEvent(
+                            prompt=args[0],
+                            response=f_return_val,
+                        )
+                    )
 
             return f_return_val
 
diff --git a/llama-index-core/llama_index/core/llms/llm.py b/llama-index-core/llama_index/core/llms/llm.py
index 21fd79876aeaa027ff86087e7ecd33fc8351d7c6..134c7d28ad32b2a5b8c2ab100e3b054afd3396d5 100644
--- a/llama-index-core/llama_index/core/llms/llm.py
+++ b/llama-index-core/llama_index/core/llms/llm.py
@@ -47,6 +47,14 @@ from llama_index.core.types import (
     TokenAsyncGen,
     TokenGen,
 )
+from llama_index.core.instrumentation.events.llm import (
+    LLMPredictEndEvent,
+    LLMPredictStartEvent,
+)
+
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 
 # NOTE: These two protocols are needed to appease mypy
@@ -240,12 +248,14 @@ class LLM(BaseLLM):
 
         return output
 
+    @dispatcher.span
     def predict(
         self,
         prompt: BasePromptTemplate,
         **prompt_args: Any,
     ) -> str:
         """Predict."""
+        dispatcher.event(LLMPredictStartEvent())
         self._log_template_data(prompt, **prompt_args)
 
         if self.metadata.is_chat_model:
@@ -257,6 +267,7 @@ class LLM(BaseLLM):
             response = self.complete(formatted_prompt, formatted=True)
             output = response.text
 
+        dispatcher.event(LLMPredictEndEvent())
         return self._parse_output(output)
 
     def stream(
@@ -281,12 +292,14 @@ class LLM(BaseLLM):
 
         return stream_tokens
 
+    @dispatcher.span
     async def apredict(
         self,
         prompt: BasePromptTemplate,
         **prompt_args: Any,
     ) -> str:
         """Async predict."""
+        dispatcher.event(LLMPredictStartEvent())
         self._log_template_data(prompt, **prompt_args)
 
         if self.metadata.is_chat_model:
@@ -298,6 +311,7 @@ class LLM(BaseLLM):
             response = await self.acomplete(formatted_prompt, formatted=True)
             output = response.text
 
+        dispatcher.event(LLMPredictEndEvent())
         return self._parse_output(output)
 
     async def astream(
diff --git a/llama-index-core/llama_index/core/query_engine/graph_query_engine.py b/llama-index-core/llama_index/core/query_engine/graph_query_engine.py
index f7807885477f826c24a0349b604af0d52a7e5aad..eef39085a7b9bf83b721dd40ca4eef2a2c070a35 100644
--- a/llama-index-core/llama_index/core/query_engine/graph_query_engine.py
+++ b/llama-index-core/llama_index/core/query_engine/graph_query_engine.py
@@ -9,6 +9,9 @@ from llama_index.core.settings import (
     Settings,
     callback_manager_from_settings_or_context,
 )
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 
 class ComposableGraphQueryEngine(BaseQueryEngine):
@@ -53,6 +56,7 @@ class ComposableGraphQueryEngine(BaseQueryEngine):
     async def _aquery(self, query_bundle: QueryBundle) -> RESPONSE_TYPE:
         return self._query_index(query_bundle, index_id=None, level=0)
 
+    @dispatcher.span
     def _query(self, query_bundle: QueryBundle) -> RESPONSE_TYPE:
         return self._query_index(query_bundle, index_id=None, level=0)
 
diff --git a/llama-index-core/llama_index/core/query_engine/retriever_query_engine.py b/llama-index-core/llama_index/core/query_engine/retriever_query_engine.py
index 29e184433f810659590f08cdfd924a68051da6f0..5bf89372bd263f2a7ba76f3c8f372b1689abfb91 100644
--- a/llama-index-core/llama_index/core/query_engine/retriever_query_engine.py
+++ b/llama-index-core/llama_index/core/query_engine/retriever_query_engine.py
@@ -22,6 +22,9 @@ from llama_index.core.settings import (
     callback_manager_from_settings_or_context,
     llm_from_settings_or_context,
 )
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 
 class RetrieverQueryEngine(BaseQueryEngine):
@@ -56,7 +59,6 @@ class RetrieverQueryEngine(BaseQueryEngine):
         )
         for node_postprocessor in self._node_postprocessors:
             node_postprocessor.callback_manager = callback_manager
-
         super().__init__(callback_manager=callback_manager)
 
     def _get_prompt_modules(self) -> PromptMixinType:
@@ -178,6 +180,7 @@ class RetrieverQueryEngine(BaseQueryEngine):
             additional_source_nodes=additional_source_nodes,
         )
 
+    @dispatcher.span
     def _query(self, query_bundle: QueryBundle) -> RESPONSE_TYPE:
         """Answer a query."""
         with self.callback_manager.event(
@@ -188,11 +191,11 @@ class RetrieverQueryEngine(BaseQueryEngine):
                 query=query_bundle,
                 nodes=nodes,
             )
-
             query_event.on_end(payload={EventPayload.RESPONSE: response})
 
         return response
 
+    @dispatcher.span
     async def _aquery(self, query_bundle: QueryBundle) -> RESPONSE_TYPE:
         """Answer a query."""
         with self.callback_manager.event(
diff --git a/llama-index-core/llama_index/core/response_synthesizers/base.py b/llama-index-core/llama_index/core/response_synthesizers/base.py
index 967a3bf81400184a602af0a9d0bb9fea24b8bf97..ebedd93aa6f906b7d8e5a2fef8870c437728cb55 100644
--- a/llama-index-core/llama_index/core/response_synthesizers/base.py
+++ b/llama-index-core/llama_index/core/response_synthesizers/base.py
@@ -10,7 +10,7 @@ Will support different modes, from 1) stuffing chunks into prompt,
 
 import logging
 from abc import abstractmethod
-from typing import Any, Dict, Generator, List, Optional, Sequence, Union, AsyncGenerator
+from typing import Any, Dict, Generator, List, Optional, Sequence, AsyncGenerator
 
 from llama_index.core.base.query_pipeline.query import (
     ChainableMixin,
@@ -36,6 +36,7 @@ from llama_index.core.schema import (
     MetadataMode,
     NodeWithScore,
     QueryBundle,
+    QueryType,
 )
 from llama_index.core.service_context import ServiceContext
 from llama_index.core.service_context_elements.llm_predictor import LLMPredictorType
@@ -45,10 +46,17 @@ from llama_index.core.settings import (
     llm_from_settings_or_context,
 )
 from llama_index.core.types import RESPONSE_TEXT_TYPE
+from llama_index.core.instrumentation.events.synthesis import (
+    SynthesizeStartEvent,
+    SynthesizeEndEvent,
+)
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 logger = logging.getLogger(__name__)
 
-QueryTextType = Union[str, QueryBundle]
+QueryTextType = QueryType
 
 
 def empty_response_generator() -> Generator[str, None, None]:
@@ -82,6 +90,7 @@ class BaseSynthesizer(ChainableMixin, PromptMixin):
             callback_manager
             or callback_manager_from_settings_or_context(Settings, service_context)
         )
+
         self._prompt_helper = (
             prompt_helper
             or Settings._prompt_helper
@@ -184,6 +193,7 @@ class BaseSynthesizer(ChainableMixin, PromptMixin):
             f"Response must be a string or a generator. Found {type(response_str)}"
         )
 
+    @dispatcher.span
     def synthesize(
         self,
         query: QueryTextType,
@@ -191,11 +201,23 @@ class BaseSynthesizer(ChainableMixin, PromptMixin):
         additional_source_nodes: Optional[Sequence[NodeWithScore]] = None,
         **response_kwargs: Any,
     ) -> RESPONSE_TYPE:
+        dispatcher.event(SynthesizeStartEvent(query=query))
+
         if len(nodes) == 0:
             if self._streaming:
-                return StreamingResponse(response_gen=empty_response_generator())
+                empty_response = StreamingResponse(
+                    response_gen=empty_response_generator()
+                )
+                dispatcher.event(
+                    SynthesizeEndEvent(query=query, response=empty_response)
+                )
+                return empty_response
             else:
-                return Response("Empty Response")
+                empty_response = Response("Empty Response")
+                dispatcher.event(
+                    SynthesizeEndEvent(query=query, response=empty_response)
+                )
+                return empty_response
 
         if isinstance(query, str):
             query = QueryBundle(query_str=query)
@@ -218,8 +240,10 @@ class BaseSynthesizer(ChainableMixin, PromptMixin):
 
             event.on_end(payload={EventPayload.RESPONSE: response})
 
+        dispatcher.event(SynthesizeEndEvent(query=query, response=response))
         return response
 
+    @dispatcher.span
     async def asynthesize(
         self,
         query: QueryTextType,
@@ -227,11 +251,22 @@ class BaseSynthesizer(ChainableMixin, PromptMixin):
         additional_source_nodes: Optional[Sequence[NodeWithScore]] = None,
         **response_kwargs: Any,
     ) -> RESPONSE_TYPE:
+        dispatcher.event(SynthesizeStartEvent(query=query))
         if len(nodes) == 0:
             if self._streaming:
-                return AsyncStreamingResponse(response_gen=empty_response_agenerator())
+                empty_response = AsyncStreamingResponse(
+                    response_gen=empty_response_agenerator()
+                )
+                dispatcher.event(
+                    SynthesizeEndEvent(query=query, response=empty_response)
+                )
+                return empty_response
             else:
-                return Response("Empty Response")
+                empty_response = Response("Empty Response")
+                dispatcher.event(
+                    SynthesizeEndEvent(query=query, response=empty_response)
+                )
+                return empty_response
 
         if isinstance(query, str):
             query = QueryBundle(query_str=query)
@@ -254,6 +289,7 @@ class BaseSynthesizer(ChainableMixin, PromptMixin):
 
             event.on_end(payload={EventPayload.RESPONSE: response})
 
+        dispatcher.event(SynthesizeEndEvent(query=query, response=response))
         return response
 
     def _as_query_component(self, **kwargs: Any) -> QueryComponent:
diff --git a/llama-index-core/llama_index/core/response_synthesizers/compact_and_refine.py b/llama-index-core/llama_index/core/response_synthesizers/compact_and_refine.py
index 8a2dbe11b6279241b6ee4e3f86a20a2d67d91a4c..a7da37f8fc210dabbcda6b9f98759cccfb9d402e 100644
--- a/llama-index-core/llama_index/core/response_synthesizers/compact_and_refine.py
+++ b/llama-index-core/llama_index/core/response_synthesizers/compact_and_refine.py
@@ -3,11 +3,15 @@ from typing import Any, List, Optional, Sequence
 from llama_index.core.prompts.prompt_utils import get_biggest_prompt
 from llama_index.core.response_synthesizers.refine import Refine
 from llama_index.core.types import RESPONSE_TEXT_TYPE
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 
 class CompactAndRefine(Refine):
     """Refine responses across compact text chunks."""
 
+    @dispatcher.span
     async def aget_response(
         self,
         query_str: str,
@@ -23,6 +27,7 @@ class CompactAndRefine(Refine):
             **response_kwargs,
         )
 
+    @dispatcher.span
     def get_response(
         self,
         query_str: str,
diff --git a/llama-index-core/llama_index/core/response_synthesizers/refine.py b/llama-index-core/llama_index/core/response_synthesizers/refine.py
index 059c6f4b3aac232415f32c66e0f30e25ecbc5c6a..61bb4564bc407372a5b952ac5a38f65ff9240ca1 100644
--- a/llama-index-core/llama_index/core/response_synthesizers/refine.py
+++ b/llama-index-core/llama_index/core/response_synthesizers/refine.py
@@ -27,6 +27,13 @@ from llama_index.core.service_context_elements.llm_predictor import (
     LLMPredictorType,
 )
 from llama_index.core.types import RESPONSE_TEXT_TYPE, BasePydanticProgram
+from llama_index.core.instrumentation.events.synthesis import (
+    GetResponseEndEvent,
+    GetResponseStartEvent,
+)
+import llama_index.core.instrumentation as instrument
+
+dispatcher = instrument.get_dispatcher(__name__)
 
 logger = logging.getLogger(__name__)
 
@@ -156,6 +163,7 @@ class Refine(BaseSynthesizer):
         if "refine_template" in prompts:
             self._refine_template = prompts["refine_template"]
 
+    @dispatcher.span
     def get_response(
         self,
         query_str: str,
@@ -164,6 +172,7 @@ class Refine(BaseSynthesizer):
         **response_kwargs: Any,
     ) -> RESPONSE_TEXT_TYPE:
         """Give response over chunks."""
+        dispatcher.event(GetResponseStartEvent())
         response: Optional[RESPONSE_TEXT_TYPE] = None
         for text_chunk in text_chunks:
             if prev_response is None:
@@ -185,6 +194,7 @@ class Refine(BaseSynthesizer):
                 response = response or "Empty Response"
         else:
             response = cast(Generator, response)
+        dispatcher.event(GetResponseEndEvent())
         return response
 
     def _default_program_factory(self, prompt: PromptTemplate) -> BasePydanticProgram:
@@ -332,6 +342,7 @@ class Refine(BaseSynthesizer):
 
         return response
 
+    @dispatcher.span
     async def aget_response(
         self,
         query_str: str,
@@ -339,6 +350,7 @@ class Refine(BaseSynthesizer):
         prev_response: Optional[RESPONSE_TEXT_TYPE] = None,
         **response_kwargs: Any,
     ) -> RESPONSE_TEXT_TYPE:
+        dispatcher.event(GetResponseStartEvent())
         response: Optional[RESPONSE_TEXT_TYPE] = None
         for text_chunk in text_chunks:
             if prev_response is None:
diff --git a/llama-index-core/poetry.lock b/llama-index-core/poetry.lock
index d43313f3c02b097a2988464e87f7ce2927e9a75e..aa9a338c216ffb5db4df2f382f001db81642a636 100644
--- a/llama-index-core/poetry.lock
+++ b/llama-index-core/poetry.lock
@@ -165,13 +165,13 @@ files = [
 
 [[package]]
 name = "anyio"
-version = "4.2.0"
+version = "4.3.0"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"},
-    {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"},
+    {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
+    {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
 ]
 
 [package.dependencies]
@@ -336,56 +336,60 @@ files = [
 
 [[package]]
 name = "asyncpg"
-version = "0.28.0"
+version = "0.29.0"
 description = "An asyncio PostgreSQL driver"
 optional = true
-python-versions = ">=3.7.0"
+python-versions = ">=3.8.0"
 files = [
-    {file = "asyncpg-0.28.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a6d1b954d2b296292ddff4e0060f494bb4270d87fb3655dd23c5c6096d16d83"},
-    {file = "asyncpg-0.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0740f836985fd2bd73dca42c50c6074d1d61376e134d7ad3ad7566c4f79f8184"},
-    {file = "asyncpg-0.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e907cf620a819fab1737f2dd90c0f185e2a796f139ac7de6aa3212a8af96c050"},
-    {file = "asyncpg-0.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b339984d55e8202e0c4b252e9573e26e5afa05617ed02252544f7b3e6de3e9"},
-    {file = "asyncpg-0.28.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c402745185414e4c204a02daca3d22d732b37359db4d2e705172324e2d94e85"},
-    {file = "asyncpg-0.28.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c88eef5e096296626e9688f00ab627231f709d0e7e3fb84bb4413dff81d996d7"},
-    {file = "asyncpg-0.28.0-cp310-cp310-win32.whl", hash = "sha256:90a7bae882a9e65a9e448fdad3e090c2609bb4637d2a9c90bfdcebbfc334bf89"},
-    {file = "asyncpg-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:76aacdcd5e2e9999e83c8fbcb748208b60925cc714a578925adcb446d709016c"},
-    {file = "asyncpg-0.28.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a0e08fe2c9b3618459caaef35979d45f4e4f8d4f79490c9fa3367251366af207"},
-    {file = "asyncpg-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b24e521f6060ff5d35f761a623b0042c84b9c9b9fb82786aadca95a9cb4a893b"},
-    {file = "asyncpg-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99417210461a41891c4ff301490a8713d1ca99b694fef05dabd7139f9d64bd6c"},
-    {file = "asyncpg-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f029c5adf08c47b10bcdc857001bbef551ae51c57b3110964844a9d79ca0f267"},
-    {file = "asyncpg-0.28.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ad1d6abf6c2f5152f46fff06b0e74f25800ce8ec6c80967f0bc789974de3c652"},
-    {file = "asyncpg-0.28.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d7fa81ada2807bc50fea1dc741b26a4e99258825ba55913b0ddbf199a10d69d8"},
-    {file = "asyncpg-0.28.0-cp311-cp311-win32.whl", hash = "sha256:f33c5685e97821533df3ada9384e7784bd1e7865d2b22f153f2e4bd4a083e102"},
-    {file = "asyncpg-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:5e7337c98fb493079d686a4a6965e8bcb059b8e1b8ec42106322fc6c1c889bb0"},
-    {file = "asyncpg-0.28.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1c56092465e718a9fdcc726cc3d9dcf3a692e4834031c9a9f871d92a75d20d48"},
-    {file = "asyncpg-0.28.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4acd6830a7da0eb4426249d71353e8895b350daae2380cb26d11e0d4a01c5472"},
-    {file = "asyncpg-0.28.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63861bb4a540fa033a56db3bb58b0c128c56fad5d24e6d0a8c37cb29b17c1c7d"},
-    {file = "asyncpg-0.28.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a93a94ae777c70772073d0512f21c74ac82a8a49be3a1d982e3f259ab5f27307"},
-    {file = "asyncpg-0.28.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d14681110e51a9bc9c065c4e7944e8139076a778e56d6f6a306a26e740ed86d2"},
-    {file = "asyncpg-0.28.0-cp37-cp37m-win32.whl", hash = "sha256:8aec08e7310f9ab322925ae5c768532e1d78cfb6440f63c078b8392a38aa636a"},
-    {file = "asyncpg-0.28.0-cp37-cp37m-win_amd64.whl", hash = "sha256:319f5fa1ab0432bc91fb39b3960b0d591e6b5c7844dafc92c79e3f1bff96abef"},
-    {file = "asyncpg-0.28.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b337ededaabc91c26bf577bfcd19b5508d879c0ad009722be5bb0a9dd30b85a0"},
-    {file = "asyncpg-0.28.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d32b680a9b16d2957a0a3cc6b7fa39068baba8e6b728f2e0a148a67644578f4"},
-    {file = "asyncpg-0.28.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f62f04cdf38441a70f279505ef3b4eadf64479b17e707c950515846a2df197"},
-    {file = "asyncpg-0.28.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f20cac332c2576c79c2e8e6464791c1f1628416d1115935a34ddd7121bfc6a4"},
-    {file = "asyncpg-0.28.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:59f9712ce01e146ff71d95d561fb68bd2d588a35a187116ef05028675462d5ed"},
-    {file = "asyncpg-0.28.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9e9f9ff1aa0eddcc3247a180ac9e9b51a62311e988809ac6152e8fb8097756"},
-    {file = "asyncpg-0.28.0-cp38-cp38-win32.whl", hash = "sha256:9e721dccd3838fcff66da98709ed884df1e30a95f6ba19f595a3706b4bc757e3"},
-    {file = "asyncpg-0.28.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ba7d06a0bea539e0487234511d4adf81dc8762249858ed2a580534e1720db00"},
-    {file = "asyncpg-0.28.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d009b08602b8b18edef3a731f2ce6d3f57d8dac2a0a4140367e194eabd3de457"},
-    {file = "asyncpg-0.28.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ec46a58d81446d580fb21b376ec6baecab7288ce5a578943e2fc7ab73bf7eb39"},
-    {file = "asyncpg-0.28.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b48ceed606cce9e64fd5480a9b0b9a95cea2b798bb95129687abd8599c8b019"},
-    {file = "asyncpg-0.28.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8858f713810f4fe67876728680f42e93b7e7d5c7b61cf2118ef9153ec16b9423"},
-    {file = "asyncpg-0.28.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e18438a0730d1c0c1715016eacda6e9a505fc5aa931b37c97d928d44941b4bf"},
-    {file = "asyncpg-0.28.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e9c433f6fcdd61c21a715ee9128a3ca48be8ac16fa07be69262f016bb0f4dbd2"},
-    {file = "asyncpg-0.28.0-cp39-cp39-win32.whl", hash = "sha256:41e97248d9076bc8e4849da9e33e051be7ba37cd507cbd51dfe4b2d99c70e3dc"},
-    {file = "asyncpg-0.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ed77f00c6aacfe9d79e9eff9e21729ce92a4b38e80ea99a58ed382f42ebd55b"},
-    {file = "asyncpg-0.28.0.tar.gz", hash = "sha256:7252cdc3acb2f52feaa3664280d3bcd78a46bd6c10bfd681acfffefa1120e278"},
-]
+    {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"},
+    {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"},
+    {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"},
+    {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"},
+    {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"},
+    {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"},
+    {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"},
+    {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"},
+    {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"},
+    {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"},
+    {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"},
+    {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"},
+    {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"},
+    {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"},
+    {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"},
+    {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"},
+    {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"},
+    {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"},
+    {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"},
+    {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"},
+    {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"},
+    {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"},
+    {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"},
+    {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"},
+    {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"},
+    {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"},
+    {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"},
+    {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"},
+    {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"},
+    {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"},
+    {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = "sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"},
+    {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"},
+    {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"},
+    {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"},
+    {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"},
+    {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"},
+    {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"},
+    {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"},
+    {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"},
+    {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"},
+    {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"},
+]
+
+[package.dependencies]
+async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.12.0\""}
 
 [package.extras]
 docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
-test = ["flake8 (>=5.0,<6.0)", "uvloop (>=0.15.3)"]
+test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"]
 
 [[package]]
 name = "attrs"
@@ -634,13 +638,13 @@ crt = ["awscrt (==0.19.17)"]
 
 [[package]]
 name = "cachetools"
-version = "5.3.2"
+version = "5.3.3"
 description = "Extensible memoizing collections and decorators"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"},
-    {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"},
+    {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"},
+    {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"},
 ]
 
 [[package]]
@@ -955,43 +959,43 @@ srsly = ">=2.4.0,<3.0.0"
 
 [[package]]
 name = "cryptography"
-version = "42.0.4"
+version = "42.0.5"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"},
-    {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"},
-    {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"},
-    {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"},
-    {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"},
-    {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"},
-    {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"},
-    {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"},
-    {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"},
-    {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"},
-    {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"},
-    {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"},
-    {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"},
-    {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"},
-    {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"},
-    {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"},
-    {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"},
-    {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"},
-    {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"},
-    {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"},
-    {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"},
-    {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"},
-    {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = "sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"},
-    {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"},
-    {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"},
-    {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"},
-    {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"},
-    {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"},
-    {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"},
-    {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"},
-    {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"},
-    {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"},
+    {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"},
+    {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"},
+    {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"},
+    {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"},
+    {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"},
+    {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"},
+    {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"},
+    {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"},
 ]
 
 [package.dependencies]
@@ -1066,43 +1070,45 @@ typing-inspect = ">=0.4.0,<1"
 
 [[package]]
 name = "datasets"
-version = "2.14.4"
+version = "2.18.0"
 description = "HuggingFace community-driven open-source library of datasets"
 optional = true
 python-versions = ">=3.8.0"
 files = [
-    {file = "datasets-2.14.4-py3-none-any.whl", hash = "sha256:29336bd316a7d827ccd4da2236596279b20ca2ac78f64c04c9483da7cbc2459b"},
-    {file = "datasets-2.14.4.tar.gz", hash = "sha256:ef29c2b5841de488cd343cfc26ab979bff77efa4d2285af51f1ad7db5c46a83b"},
+    {file = "datasets-2.18.0-py3-none-any.whl", hash = "sha256:f1bbf0e2896917a914de01cbd37075b14deea3837af87ad0d9f697388ccaeb50"},
+    {file = "datasets-2.18.0.tar.gz", hash = "sha256:cdf8b8c6abf7316377ba4f49f9589a4c74556d6b481afd0abd2284f3d69185cb"},
 ]
 
 [package.dependencies]
 aiohttp = "*"
-dill = ">=0.3.0,<0.3.8"
-fsspec = {version = ">=2021.11.1", extras = ["http"]}
-huggingface-hub = ">=0.14.0,<1.0.0"
+dill = ">=0.3.0,<0.3.9"
+filelock = "*"
+fsspec = {version = ">=2023.1.0,<=2024.2.0", extras = ["http"]}
+huggingface-hub = ">=0.19.4"
 multiprocess = "*"
 numpy = ">=1.17"
 packaging = "*"
 pandas = "*"
-pyarrow = ">=8.0.0"
+pyarrow = ">=12.0.0"
+pyarrow-hotfix = "*"
 pyyaml = ">=5.1"
 requests = ">=2.19.0"
 tqdm = ">=4.62.1"
 xxhash = "*"
 
 [package.extras]
-apache-beam = ["apache-beam (>=2.26.0,<2.44.0)"]
+apache-beam = ["apache-beam (>=2.26.0)"]
 audio = ["librosa", "soundfile (>=0.12.1)"]
 benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"]
-dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "black (>=23.1,<24.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "ruff (>=0.0.241)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"]
+dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "ruff (>=0.3.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"]
 docs = ["s3fs", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "transformers"]
-jax = ["jax (>=0.2.8,!=0.3.2,<=0.3.25)", "jaxlib (>=0.1.65,<=0.3.25)"]
+jax = ["jax (>=0.3.14)", "jaxlib (>=0.3.14)"]
 metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"]
-quality = ["black (>=23.1,<24.0)", "pyyaml (>=5.3.1)", "ruff (>=0.0.241)"]
+quality = ["ruff (>=0.3.0)"]
 s3 = ["s3fs"]
 tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"]
 tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"]
-tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"]
+tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"]
 torch = ["torch"]
 vision = ["Pillow (>=6.2.1)"]
 
@@ -1178,17 +1184,18 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
 
 [[package]]
 name = "dill"
-version = "0.3.7"
+version = "0.3.8"
 description = "serialize all of Python"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"},
-    {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"},
+    {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"},
+    {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"},
 ]
 
 [package.extras]
 graph = ["objgraph (>=1.7.2)"]
+profile = ["gprof2dot (>=2022.7.29)"]
 
 [[package]]
 name = "dirtyjson"
@@ -1236,22 +1243,22 @@ files = [
 
 [[package]]
 name = "dnspython"
-version = "2.5.0"
+version = "2.6.1"
 description = "DNS toolkit"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "dnspython-2.5.0-py3-none-any.whl", hash = "sha256:6facdf76b73c742ccf2d07add296f178e629da60be23ce4b0a9c927b1e02c3a6"},
-    {file = "dnspython-2.5.0.tar.gz", hash = "sha256:a0034815a59ba9ae888946be7ccca8f7c157b286f8455b379c692efb51022a15"},
+    {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
+    {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
 ]
 
 [package.extras]
-dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=5.0.3)", "mypy (>=1.0.1)", "pylint (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "sphinx (>=7.0.0)", "twine (>=4.0.0)", "wheel (>=0.41.0)"]
+dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
 dnssec = ["cryptography (>=41)"]
-doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.25.1)"]
-doq = ["aioquic (>=0.9.20)"]
-idna = ["idna (>=2.1)"]
-trio = ["trio (>=0.14)"]
+doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
+doq = ["aioquic (>=0.9.25)"]
+idna = ["idna (>=3.6)"]
+trio = ["trio (>=0.23)"]
 wmi = ["wmi (>=1.5.1)"]
 
 [[package]]
@@ -1393,13 +1400,13 @@ typing = ["typing-extensions (>=4.8)"]
 
 [[package]]
 name = "flatbuffers"
-version = "23.5.26"
+version = "24.3.7"
 description = "The FlatBuffers serialization format for Python"
 optional = true
 python-versions = "*"
 files = [
-    {file = "flatbuffers-23.5.26-py2.py3-none-any.whl", hash = "sha256:c0ff356da363087b915fde4b8b45bdda73432fc17cddb3c8157472eab1422ad1"},
-    {file = "flatbuffers-23.5.26.tar.gz", hash = "sha256:9ea1144cac05ce5d86e2859f431c6cd5e66cd9c78c558317c7955fb8d4c78d89"},
+    {file = "flatbuffers-24.3.7-py2.py3-none-any.whl", hash = "sha256:80c4f5dcad0ee76b7e349671a0d657f2fbba927a0244f88dd3f5ed6a3694e1fc"},
+    {file = "flatbuffers-24.3.7.tar.gz", hash = "sha256:0895c22b9a6019ff2f4de2e5e2f7cd15914043e6e7033a94c0c6369422690f22"},
 ]
 
 [[package]]
@@ -1602,13 +1609,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
 
 [[package]]
 name = "google-auth"
-version = "2.28.0"
+version = "2.28.2"
 description = "Google Authentication Library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "google-auth-2.28.0.tar.gz", hash = "sha256:3cfc1b6e4e64797584fb53fc9bd0b7afa9b7c0dba2004fa7dcc9349e58cc3195"},
-    {file = "google_auth-2.28.0-py2.py3-none-any.whl", hash = "sha256:7634d29dcd1e101f5226a23cbc4a0c6cda6394253bf80e281d9c5c6797869c53"},
+    {file = "google-auth-2.28.2.tar.gz", hash = "sha256:80b8b4969aa9ed5938c7828308f20f035bc79f9d8fb8120bf9dc8db20b41ba30"},
+    {file = "google_auth-2.28.2-py2.py3-none-any.whl", hash = "sha256:9fd67bbcd40f16d9d42f950228e9cf02a2ded4ae49198b27432d0cded5a74c38"},
 ]
 
 [package.dependencies]
@@ -1625,13 +1632,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
 
 [[package]]
 name = "googleapis-common-protos"
-version = "1.62.0"
+version = "1.63.0"
 description = "Common protobufs used in Google APIs"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"},
-    {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"},
+    {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"},
+    {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"},
 ]
 
 [package.dependencies]
@@ -1658,13 +1665,13 @@ requests = "*"
 
 [[package]]
 name = "gradientai"
-version = "1.7.0"
+version = "1.8.0"
 description = "Gradient AI API"
 optional = true
 python-versions = ">=3.8.1,<4.0.0"
 files = [
-    {file = "gradientai-1.7.0-py3-none-any.whl", hash = "sha256:8f7736e4ae4688a066ba111b7218115175fa5a4e20d1f3a57b11eb675c6b5d39"},
-    {file = "gradientai-1.7.0.tar.gz", hash = "sha256:b5071bb1483c87736b1cc0e9883a532d023e18fd473795c83eff6b1ca12c5c08"},
+    {file = "gradientai-1.8.0-py3-none-any.whl", hash = "sha256:d2d245f922d04faf7212a36f451271658c79099c87252fc4fc6c534b016bbe70"},
+    {file = "gradientai-1.8.0.tar.gz", hash = "sha256:d170609c62d3ab7e7bdbd247a9f2a836692090220bab84dc3ba21d33da191345"},
 ]
 
 [package.dependencies]
@@ -1746,84 +1753,84 @@ test = ["objgraph", "psutil"]
 
 [[package]]
 name = "grpcio"
-version = "1.60.1"
+version = "1.62.1"
 description = "HTTP/2-based RPC framework"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "grpcio-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092"},
-    {file = "grpcio-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216"},
-    {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:fead980fbc68512dfd4e0c7b1f5754c2a8e5015a04dea454b9cada54a8423525"},
-    {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:082081e6a36b6eb5cf0fd9a897fe777dbb3802176ffd08e3ec6567edd85bc104"},
-    {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55ccb7db5a665079d68b5c7c86359ebd5ebf31a19bc1a91c982fd622f1e31ff2"},
-    {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b54577032d4f235452f77a83169b6527bf4b77d73aeada97d45b2aaf1bf5ce0"},
-    {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d142bcd604166417929b071cd396aa13c565749a4c840d6c702727a59d835eb"},
-    {file = "grpcio-1.60.1-cp310-cp310-win32.whl", hash = "sha256:2a6087f234cb570008a6041c8ffd1b7d657b397fdd6d26e83d72283dae3527b1"},
-    {file = "grpcio-1.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:f2212796593ad1d0235068c79836861f2201fc7137a99aa2fea7beeb3b101177"},
-    {file = "grpcio-1.60.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:79ae0dc785504cb1e1788758c588c711f4e4a0195d70dff53db203c95a0bd303"},
-    {file = "grpcio-1.60.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4eec8b8c1c2c9b7125508ff7c89d5701bf933c99d3910e446ed531cd16ad5d87"},
-    {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8c9554ca8e26241dabe7951aa1fa03a1ba0856688ecd7e7bdbdd286ebc272e4c"},
-    {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91422ba785a8e7a18725b1dc40fbd88f08a5bb4c7f1b3e8739cab24b04fa8a03"},
-    {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba6209c96828711cb7c8fcb45ecef8c8859238baf15119daa1bef0f6c84bfe7"},
-    {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c71be3f86d67d8d1311c6076a4ba3b75ba5703c0b856b4e691c9097f9b1e8bd2"},
-    {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5ef6cfaf0d023c00002ba25d0751e5995fa0e4c9eec6cd263c30352662cbce"},
-    {file = "grpcio-1.60.1-cp311-cp311-win32.whl", hash = "sha256:a09506eb48fa5493c58f946c46754ef22f3ec0df64f2b5149373ff31fb67f3dd"},
-    {file = "grpcio-1.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:49c9b6a510e3ed8df5f6f4f3c34d7fbf2d2cae048ee90a45cd7415abab72912c"},
-    {file = "grpcio-1.60.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b58b855d0071575ea9c7bc0d84a06d2edfbfccec52e9657864386381a7ce1ae9"},
-    {file = "grpcio-1.60.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:a731ac5cffc34dac62053e0da90f0c0b8560396a19f69d9703e88240c8f05858"},
-    {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:cf77f8cf2a651fbd869fbdcb4a1931464189cd210abc4cfad357f1cacc8642a6"},
-    {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c557e94e91a983e5b1e9c60076a8fd79fea1e7e06848eb2e48d0ccfb30f6e073"},
-    {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:069fe2aeee02dfd2135d562d0663fe70fbb69d5eed6eb3389042a7e963b54de8"},
-    {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb0af13433dbbd1c806e671d81ec75bd324af6ef75171fd7815ca3074fe32bfe"},
-    {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2f44c32aef186bbba254129cea1df08a20be414144ac3bdf0e84b24e3f3b2e05"},
-    {file = "grpcio-1.60.1-cp312-cp312-win32.whl", hash = "sha256:a212e5dea1a4182e40cd3e4067ee46be9d10418092ce3627475e995cca95de21"},
-    {file = "grpcio-1.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:6e490fa5f7f5326222cb9f0b78f207a2b218a14edf39602e083d5f617354306f"},
-    {file = "grpcio-1.60.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:4216e67ad9a4769117433814956031cb300f85edc855252a645a9a724b3b6594"},
-    {file = "grpcio-1.60.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:73e14acd3d4247169955fae8fb103a2b900cfad21d0c35f0dcd0fdd54cd60367"},
-    {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6ecf21d20d02d1733e9c820fb5c114c749d888704a7ec824b545c12e78734d1c"},
-    {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bdea30dcfd4f87b045d404388469eb48a48c33a6195a043d116ed1b9a0196c"},
-    {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b69e79d00f78c81eecfb38f4516080dc7f36a198b6b37b928f1c13b3c063e9"},
-    {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:39aa848794b887120b1d35b1b994e445cc028ff602ef267f87c38122c1add50d"},
-    {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72153a0d2e425f45b884540a61c6639436ddafa1829a42056aa5764b84108b8e"},
-    {file = "grpcio-1.60.1-cp37-cp37m-win_amd64.whl", hash = "sha256:50d56280b482875d1f9128ce596e59031a226a8b84bec88cb2bf76c289f5d0de"},
-    {file = "grpcio-1.60.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:6d140bdeb26cad8b93c1455fa00573c05592793c32053d6e0016ce05ba267549"},
-    {file = "grpcio-1.60.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:bc808924470643b82b14fe121923c30ec211d8c693e747eba8a7414bc4351a23"},
-    {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:70c83bb530572917be20c21f3b6be92cd86b9aecb44b0c18b1d3b2cc3ae47df0"},
-    {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b106bc52e7f28170e624ba61cc7dc6829566e535a6ec68528f8e1afbed1c41f"},
-    {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e980cd6db1088c144b92fe376747328d5554bc7960ce583ec7b7d81cd47287"},
-    {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c5807e9152eff15f1d48f6b9ad3749196f79a4a050469d99eecb679be592acc"},
-    {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1c3dc536b3ee124e8b24feb7533e5c70b9f2ef833e3b2e5513b2897fd46763a"},
-    {file = "grpcio-1.60.1-cp38-cp38-win32.whl", hash = "sha256:d7404cebcdb11bb5bd40bf94131faf7e9a7c10a6c60358580fe83913f360f929"},
-    {file = "grpcio-1.60.1-cp38-cp38-win_amd64.whl", hash = "sha256:c8754c75f55781515a3005063d9a05878b2cfb3cb7e41d5401ad0cf19de14872"},
-    {file = "grpcio-1.60.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:0250a7a70b14000fa311de04b169cc7480be6c1a769b190769d347939d3232a8"},
-    {file = "grpcio-1.60.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:660fc6b9c2a9ea3bb2a7e64ba878c98339abaf1811edca904ac85e9e662f1d73"},
-    {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76eaaba891083fcbe167aa0f03363311a9f12da975b025d30e94b93ac7a765fc"},
-    {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d97c65ea7e097056f3d1ead77040ebc236feaf7f71489383d20f3b4c28412a"},
-    {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb2a2911b028f01c8c64d126f6b632fcd8a9ac975aa1b3855766c94e4107180"},
-    {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5a1ebbae7e2214f51b1f23b57bf98eeed2cf1ba84e4d523c48c36d5b2f8829ff"},
-    {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a66f4d2a005bc78e61d805ed95dedfcb35efa84b7bba0403c6d60d13a3de2d6"},
-    {file = "grpcio-1.60.1-cp39-cp39-win32.whl", hash = "sha256:8d488fbdbf04283f0d20742b64968d44825617aa6717b07c006168ed16488804"},
-    {file = "grpcio-1.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b7199cd2a55e62e45bfb629a35b71fc2c0cb88f686a047f25b1112d3810904"},
-    {file = "grpcio-1.60.1.tar.gz", hash = "sha256:dd1d3a8d1d2e50ad9b59e10aa7f07c7d1be2b367f3f2d33c5fade96ed5460962"},
-]
-
-[package.extras]
-protobuf = ["grpcio-tools (>=1.60.1)"]
+    {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"},
+    {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"},
+    {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"},
+    {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"},
+    {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"},
+    {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"},
+    {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"},
+    {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"},
+    {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"},
+    {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"},
+    {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"},
+    {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"},
+    {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"},
+    {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"},
+    {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"},
+    {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"},
+    {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"},
+    {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"},
+    {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"},
+    {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"},
+    {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"},
+    {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"},
+    {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"},
+    {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"},
+    {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"},
+    {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"},
+    {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"},
+    {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"},
+    {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"},
+    {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"},
+    {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"},
+    {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"},
+    {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"},
+    {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"},
+    {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"},
+    {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"},
+    {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"},
+    {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"},
+    {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"},
+    {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"},
+    {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"},
+    {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"},
+    {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"},
+    {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"},
+    {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"},
+    {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"},
+    {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"},
+    {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"},
+    {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"},
+    {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"},
+    {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"},
+    {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"},
+    {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"},
+    {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"},
+]
+
+[package.extras]
+protobuf = ["grpcio-tools (>=1.62.1)"]
 
 [[package]]
 name = "grpcio-status"
-version = "1.60.1"
+version = "1.62.1"
 description = "Status proto mapping for gRPC"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "grpcio-status-1.60.1.tar.gz", hash = "sha256:61b5aab8989498e8aa142c20b88829ea5d90d18c18c853b9f9e6d407d37bf8b4"},
-    {file = "grpcio_status-1.60.1-py3-none-any.whl", hash = "sha256:3034fdb239185b6e0f3169d08c268c4507481e4b8a434c21311a03d9eb5889a0"},
+    {file = "grpcio-status-1.62.1.tar.gz", hash = "sha256:3431c8abbab0054912c41df5c72f03ddf3b7a67be8a287bb3c18a3456f96ff77"},
+    {file = "grpcio_status-1.62.1-py3-none-any.whl", hash = "sha256:af0c3ab85da31669f21749e8d53d669c061ebc6ce5637be49a46edcb7aa8ab17"},
 ]
 
 [package.dependencies]
 googleapis-common-protos = ">=1.5.5"
-grpcio = ">=1.60.1"
+grpcio = ">=1.62.1"
 protobuf = ">=4.21.6"
 
 [[package]]
@@ -1868,13 +1875,13 @@ files = [
 
 [[package]]
 name = "httpcore"
-version = "1.0.3"
+version = "1.0.4"
 description = "A minimal low-level HTTP client."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"},
-    {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"},
+    {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"},
+    {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"},
 ]
 
 [package.dependencies]
@@ -1885,17 +1892,17 @@ h11 = ">=0.13,<0.15"
 asyncio = ["anyio (>=4.0,<5.0)"]
 http2 = ["h2 (>=3,<5)"]
 socks = ["socksio (==1.*)"]
-trio = ["trio (>=0.22.0,<0.24.0)"]
+trio = ["trio (>=0.22.0,<0.25.0)"]
 
 [[package]]
 name = "httpx"
-version = "0.26.0"
+version = "0.27.0"
 description = "The next generation HTTP client."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"},
-    {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"},
+    {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"},
+    {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"},
 ]
 
 [package.dependencies]
@@ -1913,13 +1920,13 @@ socks = ["socksio (==1.*)"]
 
 [[package]]
 name = "huggingface-hub"
-version = "0.20.3"
+version = "0.21.4"
 description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
 optional = true
 python-versions = ">=3.8.0"
 files = [
-    {file = "huggingface_hub-0.20.3-py3-none-any.whl", hash = "sha256:d988ae4f00d3e307b0c80c6a05ca6dbb7edba8bba3079f74cda7d9c2e562a7b6"},
-    {file = "huggingface_hub-0.20.3.tar.gz", hash = "sha256:94e7f8e074475fbc67d6a71957b678e1b4a74ff1b64a644fd6cbb83da962d05d"},
+    {file = "huggingface_hub-0.21.4-py3-none-any.whl", hash = "sha256:df37c2c37fc6c82163cdd8a67ede261687d80d1e262526d6c0ce73b6b3630a7b"},
+    {file = "huggingface_hub-0.21.4.tar.gz", hash = "sha256:e1f4968c93726565a80edf6dc309763c7b546d0cfe79aa221206034d50155531"},
 ]
 
 [package.dependencies]
@@ -1936,11 +1943,12 @@ all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi",
 cli = ["InquirerPy (==0.3.4)"]
 dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
 fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"]
+hf-transfer = ["hf-transfer (>=0.1.4)"]
 inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"]
 quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"]
 tensorflow = ["graphviz", "pydot", "tensorflow"]
 testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"]
-torch = ["torch"]
+torch = ["safetensors", "torch"]
 typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"]
 
 [[package]]
@@ -1959,13 +1967,13 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve
 
 [[package]]
 name = "identify"
-version = "2.5.34"
+version = "2.5.35"
 description = "File identification library for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "identify-2.5.34-py2.py3-none-any.whl", hash = "sha256:a4316013779e433d08b96e5eabb7f641e6c7942e4ab5d4c509ebd2e7a8994aed"},
-    {file = "identify-2.5.34.tar.gz", hash = "sha256:ee17bc9d499899bc9eaec1ac7bf2dc9eedd480db9d88b96d123d3b64a9d34f5d"},
+    {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"},
+    {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"},
 ]
 
 [package.extras]
@@ -1995,32 +2003,32 @@ files = [
 
 [[package]]
 name = "importlib-metadata"
-version = "7.0.1"
+version = "7.0.2"
 description = "Read metadata from Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"},
-    {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"},
+    {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"},
+    {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"},
 ]
 
 [package.dependencies]
 zipp = ">=0.5"
 
 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
 perf = ["ipython"]
-testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
 
 [[package]]
 name = "importlib-resources"
-version = "6.1.1"
+version = "6.1.3"
 description = "Read resources from Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"},
-    {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"},
+    {file = "importlib_resources-6.1.3-py3-none-any.whl", hash = "sha256:4c0269e3580fe2634d364b39b38b961540a7738c02cb984e98add8b4221d793d"},
+    {file = "importlib_resources-6.1.3.tar.gz", hash = "sha256:56fb4525197b78544a3354ea27793952ab93f935bb4bf746b846bb1015020f2b"},
 ]
 
 [package.dependencies]
@@ -2028,7 +2036,7 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
 
 [package.extras]
 docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"]
+testing = ["jaraco.collections", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"]
 
 [[package]]
 name = "iniconfig"
@@ -2054,13 +2062,13 @@ files = [
 
 [[package]]
 name = "ipykernel"
-version = "6.29.2"
+version = "6.29.3"
 description = "IPython Kernel for Jupyter"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "ipykernel-6.29.2-py3-none-any.whl", hash = "sha256:50384f5c577a260a1d53f1f59a828c7266d321c9b7d00d345693783f66616055"},
-    {file = "ipykernel-6.29.2.tar.gz", hash = "sha256:3bade28004e3ff624ed57974948116670604ac5f676d12339693f3142176d3f0"},
+    {file = "ipykernel-6.29.3-py3-none-any.whl", hash = "sha256:5aa086a4175b0229d4eca211e181fb473ea78ffd9869af36ba7694c947302a21"},
+    {file = "ipykernel-6.29.3.tar.gz", hash = "sha256:e14c250d1f9ea3989490225cc1a542781b095a18a19447fcf2b5eaf7d0ac5bd2"},
 ]
 
 [package.dependencies]
@@ -2083,7 +2091,7 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"]
 docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"]
 pyqt5 = ["pyqt5"]
 pyside6 = ["pyside6"]
-test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (==0.23.4)", "pytest-cov", "pytest-timeout"]
+test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"]
 
 [[package]]
 name = "ipython"
@@ -2232,13 +2240,13 @@ files = [
 
 [[package]]
 name = "json5"
-version = "0.9.14"
+version = "0.9.22"
 description = "A Python implementation of the JSON5 data format."
 optional = false
-python-versions = "*"
+python-versions = ">=3.8"
 files = [
-    {file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f"},
-    {file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02"},
+    {file = "json5-0.9.22-py3-none-any.whl", hash = "sha256:6621007c70897652f8b5d03885f732771c48d1925591ad989aa80c7e0e5ad32f"},
+    {file = "json5-0.9.22.tar.gz", hash = "sha256:b729bde7650b2196a35903a597d2b704b8fdf8648bfb67368cfb79f1174a17bd"},
 ]
 
 [package.extras]
@@ -2470,13 +2478,13 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p
 
 [[package]]
 name = "jupyter-lsp"
-version = "2.2.2"
+version = "2.2.4"
 description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"},
-    {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"},
+    {file = "jupyter-lsp-2.2.4.tar.gz", hash = "sha256:5e50033149344065348e688608f3c6d654ef06d9856b67655bd7b6bac9ee2d59"},
+    {file = "jupyter_lsp-2.2.4-py3-none-any.whl", hash = "sha256:da61cb63a16b6dff5eac55c2699cc36eac975645adee02c41bdfc03bf4802e77"},
 ]
 
 [package.dependencies]
@@ -2485,13 +2493,13 @@ jupyter-server = ">=1.1.2"
 
 [[package]]
 name = "jupyter-server"
-version = "2.12.5"
+version = "2.13.0"
 description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"},
-    {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"},
+    {file = "jupyter_server-2.13.0-py3-none-any.whl", hash = "sha256:77b2b49c3831fbbfbdb5048cef4350d12946191f833a24e5f83e5f8f4803e97b"},
+    {file = "jupyter_server-2.13.0.tar.gz", hash = "sha256:c80bfb049ea20053c3d9641c2add4848b38073bf79f1729cea1faed32fc1c78e"},
 ]
 
 [package.dependencies]
@@ -2517,7 +2525,7 @@ websocket-client = "*"
 
 [package.extras]
 docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"]
-test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"]
+test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"]
 
 [[package]]
 name = "jupyter-server-terminals"
@@ -2540,13 +2548,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>
 
 [[package]]
 name = "jupyterlab"
-version = "4.1.1"
+version = "4.1.4"
 description = "JupyterLab computational environment"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "jupyterlab-4.1.1-py3-none-any.whl", hash = "sha256:fa3e8c18b804eac04e51ceebd9dd3dd396e08106816f0d09cc426799d7087632"},
-    {file = "jupyterlab-4.1.1.tar.gz", hash = "sha256:8acc9f561729d8f32c14c294c397917cddfeeb13a5d46f811979b71b4911a9fd"},
+    {file = "jupyterlab-4.1.4-py3-none-any.whl", hash = "sha256:f92c3f2b12b88efcf767205f49be9b2f86b85544f9c4f342bb5e9904a16cf931"},
+    {file = "jupyterlab-4.1.4.tar.gz", hash = "sha256:e03c82c124ad8a0892e498b9dde79c50868b2c267819aca3f55ce47c57ebeb1d"},
 ]
 
 [package.dependencies]
@@ -2585,13 +2593,13 @@ files = [
 
 [[package]]
 name = "jupyterlab-server"
-version = "2.25.3"
+version = "2.25.4"
 description = "A set of server components for JupyterLab and JupyterLab like applications."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "jupyterlab_server-2.25.3-py3-none-any.whl", hash = "sha256:c48862519fded9b418c71645d85a49b2f0ec50d032ba8316738e9276046088c1"},
-    {file = "jupyterlab_server-2.25.3.tar.gz", hash = "sha256:846f125a8a19656611df5b03e5912c8393cea6900859baa64fa515eb64a8dc40"},
+    {file = "jupyterlab_server-2.25.4-py3-none-any.whl", hash = "sha256:eb645ecc8f9b24bac5decc7803b6d5363250e16ec5af814e516bc2c54dd88081"},
+    {file = "jupyterlab_server-2.25.4.tar.gz", hash = "sha256:2098198e1e82e0db982440f9b5136175d73bea2cd42a6480aa6fd502cb23c4f9"},
 ]
 
 [package.dependencies]
@@ -2607,7 +2615,7 @@ requests = ">=2.31"
 [package.extras]
 docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"]
 openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"]
-test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"]
+test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"]
 
 [[package]]
 name = "jupyterlab-widgets"
@@ -2663,19 +2671,19 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
 
 [[package]]
 name = "langchain-community"
-version = "0.0.25"
+version = "0.0.27"
 description = "Community contributed LangChain integrations."
 optional = true
 python-versions = ">=3.8.1,<4.0"
 files = [
-    {file = "langchain_community-0.0.25-py3-none-any.whl", hash = "sha256:09b931ba710b1a10e449396d59f38575e0554acd527287937c33a2c4abdc6d83"},
-    {file = "langchain_community-0.0.25.tar.gz", hash = "sha256:b6c8c14cd6ec2635e51e3974bf78a8de3b959bbedb4af55aad164f8cf392f0c5"},
+    {file = "langchain_community-0.0.27-py3-none-any.whl", hash = "sha256:377a7429580a71d909012df5aae538d295fa6f21bc479e5dac6fd1589762b3ab"},
+    {file = "langchain_community-0.0.27.tar.gz", hash = "sha256:266dffbd4c1666db1889cad953fa5102d4debff782335353b6d78636a761778d"},
 ]
 
 [package.dependencies]
 aiohttp = ">=3.8.3,<4.0.0"
 dataclasses-json = ">=0.5.7,<0.7"
-langchain-core = ">=0.1.28,<0.2.0"
+langchain-core = ">=0.1.30,<0.2.0"
 langsmith = ">=0.1.0,<0.2.0"
 numpy = ">=1,<2"
 PyYAML = ">=5.3"
@@ -2685,7 +2693,7 @@ tenacity = ">=8.1.0,<9.0.0"
 
 [package.extras]
 cli = ["typer (>=0.9.0,<0.10.0)"]
-extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit 
(>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"]
+extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss 
(>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"]
 
 [[package]]
 name = "langchain-core"
@@ -2744,13 +2752,13 @@ data = ["language-data (>=1.1,<2.0)"]
 
 [[package]]
 name = "langsmith"
-version = "0.1.22"
+version = "0.1.23"
 description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
 optional = false
 python-versions = ">=3.8.1,<4.0"
 files = [
-    {file = "langsmith-0.1.22-py3-none-any.whl", hash = "sha256:b877d302bd4cf7c79e9e6e24bedf669132abf0659143390a29350eda0945544f"},
-    {file = "langsmith-0.1.22.tar.gz", hash = "sha256:2921ae2297c2fb23baa2641b9cf416914ac7fd65f4a9dd5a573bc30efb54b693"},
+    {file = "langsmith-0.1.23-py3-none-any.whl", hash = "sha256:69984268b9867cb31b875965b3f86b6f56ba17dd5454d487d3a1a999bdaeea69"},
+    {file = "langsmith-0.1.23.tar.gz", hash = "sha256:327c66ec0de8c1bc57bfa47bbc70a29ef749e97c3e5571b9baf754d1e0644220"},
 ]
 
 [package.dependencies]
@@ -3052,22 +3060,21 @@ files = [
 
 [[package]]
 name = "marshmallow"
-version = "3.20.2"
+version = "3.21.1"
 description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"},
-    {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"},
+    {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"},
+    {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"},
 ]
 
 [package.dependencies]
 packaging = ">=17.0"
 
 [package.extras]
-dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"]
-docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
-lint = ["pre-commit (>=2.4,<4.0)"]
+dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"]
+docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"]
 tests = ["pytest", "pytz", "simplejson"]
 
 [[package]]
@@ -3179,22 +3186,22 @@ tests = ["pytest (>=4.6)"]
 
 [[package]]
 name = "msal"
-version = "1.26.0"
+version = "1.27.0"
 description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect."
 optional = true
 python-versions = ">=2.7"
 files = [
-    {file = "msal-1.26.0-py2.py3-none-any.whl", hash = "sha256:be77ba6a8f49c9ff598bbcdc5dfcf1c9842f3044300109af738e8c3e371065b5"},
-    {file = "msal-1.26.0.tar.gz", hash = "sha256:224756079fe338be838737682b49f8ebc20a87c1c5eeaf590daae4532b83de15"},
+    {file = "msal-1.27.0-py2.py3-none-any.whl", hash = "sha256:572d07149b83e7343a85a3bcef8e581167b4ac76befcbbb6eef0c0e19643cdc0"},
+    {file = "msal-1.27.0.tar.gz", hash = "sha256:3109503c038ba6b307152b0e8d34f98113f2e7a78986e28d0baf5b5303afda52"},
 ]
 
 [package.dependencies]
-cryptography = ">=0.6,<44"
+cryptography = ">=0.6,<45"
 PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]}
 requests = ">=2.0.0,<3"
 
 [package.extras]
-broker = ["pymsalruntime (>=0.13.2,<0.14)"]
+broker = ["pymsalruntime (>=0.13.2,<0.15)"]
 
 [[package]]
 name = "multidict"
@@ -3297,31 +3304,27 @@ files = [
 
 [[package]]
 name = "multiprocess"
-version = "0.70.15"
+version = "0.70.16"
 description = "better multiprocessing and multithreading in Python"
 optional = true
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "multiprocess-0.70.15-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:aa36c7ed16f508091438687fe9baa393a7a8e206731d321e443745e743a0d4e5"},
-    {file = "multiprocess-0.70.15-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:20e024018c46d0d1602024c613007ac948f9754659e3853b0aa705e83f6931d8"},
-    {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_i686.whl", hash = "sha256:e576062981c91f0fe8a463c3d52506e598dfc51320a8dd8d78b987dfca91c5db"},
-    {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:e73f497e6696a0f5433ada2b3d599ae733b87a6e8b008e387c62ac9127add177"},
-    {file = "multiprocess-0.70.15-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:73db2e7b32dcc7f9b0f075c2ffa45c90b6729d3f1805f27e88534c8d321a1be5"},
-    {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_i686.whl", hash = "sha256:4271647bd8a49c28ecd6eb56a7fdbd3c212c45529ad5303b40b3c65fc6928e5f"},
-    {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:cf981fb998d6ec3208cb14f0cf2e9e80216e834f5d51fd09ebc937c32b960902"},
-    {file = "multiprocess-0.70.15-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:18f9f2c7063346d1617bd1684fdcae8d33380ae96b99427260f562e1a1228b67"},
-    {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_i686.whl", hash = "sha256:0eac53214d664c49a34695e5824872db4006b1a465edd7459a251809c3773370"},
-    {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1a51dd34096db47fb21fa2b839e615b051d51b97af9a67afbcdaa67186b44883"},
-    {file = "multiprocess-0.70.15-py310-none-any.whl", hash = "sha256:7dd58e33235e83cf09d625e55cffd7b0f0eede7ee9223cdd666a87624f60c21a"},
-    {file = "multiprocess-0.70.15-py311-none-any.whl", hash = "sha256:134f89053d82c9ed3b73edd3a2531eb791e602d4f4156fc92a79259590bd9670"},
-    {file = "multiprocess-0.70.15-py37-none-any.whl", hash = "sha256:f7d4a1629bccb433114c3b4885f69eccc200994323c80f6feee73b0edc9199c5"},
-    {file = "multiprocess-0.70.15-py38-none-any.whl", hash = "sha256:bee9afba476c91f9ebee7beeee0601face9eff67d822e893f9a893725fbd6316"},
-    {file = "multiprocess-0.70.15-py39-none-any.whl", hash = "sha256:3e0953f5d52b4c76f1c973eaf8214554d146f2be5decb48e928e55c7a2d19338"},
-    {file = "multiprocess-0.70.15.tar.gz", hash = "sha256:f20eed3036c0ef477b07a4177cf7c1ba520d9a2677870a4f47fe026f0cd6787e"},
+    {file = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"},
+    {file = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"},
+    {file = "multiprocess-0.70.16-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37b55f71c07e2d741374998c043b9520b626a8dddc8b3129222ca4f1a06ef67a"},
+    {file = "multiprocess-0.70.16-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba8c31889abf4511c7308a8c52bb4a30b9d590e7f58523302ba00237702ca054"},
+    {file = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"},
+    {file = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"},
+    {file = "multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"},
+    {file = "multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"},
+    {file = "multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"},
+    {file = "multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435"},
+    {file = "multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"},
+    {file = "multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"},
 ]
 
 [package.dependencies]
-dill = ">=0.3.7"
+dill = ">=0.3.8"
 
 [[package]]
 name = "murmurhash"
@@ -3630,13 +3633,13 @@ setuptools = "*"
 
 [[package]]
 name = "notebook"
-version = "7.1.0"
+version = "7.1.1"
 description = "Jupyter Notebook - A web-based notebook environment for interactive computing"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "notebook-7.1.0-py3-none-any.whl", hash = "sha256:a8fa4ccb5e5fe220f29d9900337efd7752bc6f2efe004d6f320db01f7743adc9"},
-    {file = "notebook-7.1.0.tar.gz", hash = "sha256:99caf01ff166b1cc86355c9b37c1ba9bf566c1d7fc4ab57bb6f8f24e36c4260e"},
+    {file = "notebook-7.1.1-py3-none-any.whl", hash = "sha256:197d8e0595acabf4005851c8716e952a81b405f7aefb648067a761fbde267ce7"},
+    {file = "notebook-7.1.1.tar.gz", hash = "sha256:818e7420fa21f402e726afb9f02df7f3c10f294c02e383ed19852866c316108b"},
 ]
 
 [package.dependencies]
@@ -3826,7 +3829,7 @@ files = [
 
 [[package]]
 name = "nvidia-nvjitlink-cu12"
-version = "12.3.101"
+version = "12.4.99"
 description = "Nvidia JIT LTO Library"
 optional = true
 python-versions = ">=3"
@@ -3890,36 +3893,36 @@ reference = ["Pillow", "google-re2"]
 
 [[package]]
 name = "onnxruntime"
-version = "1.17.0"
+version = "1.17.1"
 description = "ONNX Runtime is a runtime accelerator for Machine Learning models"
 optional = true
 python-versions = "*"
 files = [
-    {file = "onnxruntime-1.17.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d2b22a25a94109cc983443116da8d9805ced0256eb215c5e6bc6dcbabefeab96"},
-    {file = "onnxruntime-1.17.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4c87d83c6f58d1af2675fc99e3dc810f2dbdb844bcefd0c1b7573632661f6fc"},
-    {file = "onnxruntime-1.17.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dba55723bf9b835e358f48c98a814b41692c393eb11f51e02ece0625c756b797"},
-    {file = "onnxruntime-1.17.0-cp310-cp310-win32.whl", hash = "sha256:ee48422349cc500273beea7607e33c2237909f58468ae1d6cccfc4aecd158565"},
-    {file = "onnxruntime-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f34cc46553359293854e38bdae2ab1be59543aad78a6317e7746d30e311110c3"},
-    {file = "onnxruntime-1.17.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:16d26badd092c8c257fa57c458bb600d96dc15282c647ccad0ed7b2732e6c03b"},
-    {file = "onnxruntime-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6f1273bebcdb47ed932d076c85eb9488bc4768fcea16d5f2747ca692fad4f9d3"},
-    {file = "onnxruntime-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cb60fd3c2c1acd684752eb9680e89ae223e9801a9b0e0dc7b28adabe45a2e380"},
-    {file = "onnxruntime-1.17.0-cp311-cp311-win32.whl", hash = "sha256:4b038324586bc905299e435f7c00007e6242389c856b82fe9357fdc3b1ef2bdc"},
-    {file = "onnxruntime-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:93d39b3fa1ee01f034f098e1c7769a811a21365b4883f05f96c14a2b60c6028b"},
-    {file = "onnxruntime-1.17.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:90c0890e36f880281c6c698d9bc3de2afbeee2f76512725ec043665c25c67d21"},
-    {file = "onnxruntime-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7466724e809a40e986b1637cba156ad9fc0d1952468bc00f79ef340bc0199552"},
-    {file = "onnxruntime-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d47bee7557a8b99c8681b6882657a515a4199778d6d5e24e924d2aafcef55b0a"},
-    {file = "onnxruntime-1.17.0-cp312-cp312-win32.whl", hash = "sha256:bb1bf1ee575c665b8bbc3813ab906e091a645a24ccc210be7932154b8260eca1"},
-    {file = "onnxruntime-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:ac2f286da3494b29b4186ca193c7d4e6a2c1f770c4184c7192c5da142c3dec28"},
-    {file = "onnxruntime-1.17.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1ec485643b93e0a3896c655eb2426decd63e18a278bb7ccebc133b340723624f"},
-    {file = "onnxruntime-1.17.0-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83c35809cda898c5a11911c69ceac8a2ac3925911854c526f73bad884582f911"},
-    {file = "onnxruntime-1.17.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fa464aa4d81df818375239e481887b656e261377d5b6b9a4692466f5f3261edc"},
-    {file = "onnxruntime-1.17.0-cp38-cp38-win32.whl", hash = "sha256:b7b337cd0586f7836601623cbd30a443df9528ef23965860d11c753ceeb009f2"},
-    {file = "onnxruntime-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:fbb9faaf51d01aa2c147ef52524d9326744c852116d8005b9041809a71838878"},
-    {file = "onnxruntime-1.17.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:5a06ab84eaa350bf64b1d747b33ccf10da64221ed1f38f7287f15eccbec81603"},
-    {file = "onnxruntime-1.17.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d3d11db2c8242766212a68d0b139745157da7ce53bd96ba349a5c65e5a02357"},
-    {file = "onnxruntime-1.17.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5632077c3ab8b0cd4f74b0af9c4e924be012b1a7bcd7daa845763c6c6bf14b7d"},
-    {file = "onnxruntime-1.17.0-cp39-cp39-win32.whl", hash = "sha256:61a12732cba869b3ad2d4e29ab6cb62c7a96f61b8c213f7fcb961ba412b70b37"},
-    {file = "onnxruntime-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:461fa0fc7d9c392c352b6cccdedf44d818430f3d6eacd924bb804fdea2dcfd02"},
+    {file = "onnxruntime-1.17.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d43ac17ac4fa3c9096ad3c0e5255bb41fd134560212dc124e7f52c3159af5d21"},
+    {file = "onnxruntime-1.17.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55b5e92a4c76a23981c998078b9bf6145e4fb0b016321a8274b1607bd3c6bd35"},
+    {file = "onnxruntime-1.17.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ebbcd2bc3a066cf54e6f18c75708eb4d309ef42be54606d22e5bdd78afc5b0d7"},
+    {file = "onnxruntime-1.17.1-cp310-cp310-win32.whl", hash = "sha256:5e3716b5eec9092e29a8d17aab55e737480487deabfca7eac3cd3ed952b6ada9"},
+    {file = "onnxruntime-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:fbb98cced6782ae1bb799cc74ddcbbeeae8819f3ad1d942a74d88e72b6511337"},
+    {file = "onnxruntime-1.17.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:36fd6f87a1ecad87e9c652e42407a50fb305374f9a31d71293eb231caae18784"},
+    {file = "onnxruntime-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99a8bddeb538edabc524d468edb60ad4722cff8a49d66f4e280c39eace70500b"},
+    {file = "onnxruntime-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd7fddb4311deb5a7d3390cd8e9b3912d4d963efbe4dfe075edbaf18d01c024e"},
+    {file = "onnxruntime-1.17.1-cp311-cp311-win32.whl", hash = "sha256:606a7cbfb6680202b0e4f1890881041ffc3ac6e41760a25763bd9fe146f0b335"},
+    {file = "onnxruntime-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:53e4e06c0a541696ebdf96085fd9390304b7b04b748a19e02cf3b35c869a1e76"},
+    {file = "onnxruntime-1.17.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:40f08e378e0f85929712a2b2c9b9a9cc400a90c8a8ca741d1d92c00abec60843"},
+    {file = "onnxruntime-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac79da6d3e1bb4590f1dad4bb3c2979d7228555f92bb39820889af8b8e6bd472"},
+    {file = "onnxruntime-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ae9ba47dc099004e3781f2d0814ad710a13c868c739ab086fc697524061695ea"},
+    {file = "onnxruntime-1.17.1-cp312-cp312-win32.whl", hash = "sha256:2dff1a24354220ac30e4a4ce2fb1df38cb1ea59f7dac2c116238d63fe7f4c5ff"},
+    {file = "onnxruntime-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:6226a5201ab8cafb15e12e72ff2a4fc8f50654e8fa5737c6f0bd57c5ff66827e"},
+    {file = "onnxruntime-1.17.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:cd0c07c0d1dfb8629e820b05fda5739e4835b3b82faf43753d2998edf2cf00aa"},
+    {file = "onnxruntime-1.17.1-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:617ebdf49184efa1ba6e4467e602fbfa029ed52c92f13ce3c9f417d303006381"},
+    {file = "onnxruntime-1.17.1-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9dae9071e3facdf2920769dceee03b71c684b6439021defa45b830d05e148924"},
+    {file = "onnxruntime-1.17.1-cp38-cp38-win32.whl", hash = "sha256:835d38fa1064841679433b1aa8138b5e1218ddf0cfa7a3ae0d056d8fd9cec713"},
+    {file = "onnxruntime-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:96621e0c555c2453bf607606d08af3f70fbf6f315230c28ddea91754e17ad4e6"},
+    {file = "onnxruntime-1.17.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:7a9539935fb2d78ebf2cf2693cad02d9930b0fb23cdd5cf37a7df813e977674d"},
+    {file = "onnxruntime-1.17.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45c6a384e9d9a29c78afff62032a46a993c477b280247a7e335df09372aedbe9"},
+    {file = "onnxruntime-1.17.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4e19f966450f16863a1d6182a685ca33ae04d7772a76132303852d05b95411ea"},
+    {file = "onnxruntime-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e2ae712d64a42aac29ed7a40a426cb1e624a08cfe9273dcfe681614aa65b07dc"},
+    {file = "onnxruntime-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:f7e9f7fb049825cdddf4a923cfc7c649d84d63c0134315f8e0aa9e0c3004672c"},
 ]
 
 [package.dependencies]
@@ -3932,13 +3935,13 @@ sympy = "*"
 
 [[package]]
 name = "openai"
-version = "1.12.0"
+version = "1.13.3"
 description = "The official Python library for the openai API"
 optional = false
 python-versions = ">=3.7.1"
 files = [
-    {file = "openai-1.12.0-py3-none-any.whl", hash = "sha256:a54002c814e05222e413664f651b5916714e4700d041d5cf5724d3ae1a3e3481"},
-    {file = "openai-1.12.0.tar.gz", hash = "sha256:99c5d257d09ea6533d689d1cc77caa0ac679fa21efef8893d8b0832a86877f1b"},
+    {file = "openai-1.13.3-py3-none-any.whl", hash = "sha256:5769b62abd02f350a8dd1a3a242d8972c947860654466171d60fb0972ae0a41c"},
+    {file = "openai-1.13.3.tar.gz", hash = "sha256:ff6c6b3bc7327e715e4b3592a923a5a1c7519ff5dd764a83d69f633d49e77a7b"},
 ]
 
 [package.dependencies]
@@ -3955,13 +3958,13 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
 
 [[package]]
 name = "optimum"
-version = "1.16.2"
+version = "1.17.1"
 description = "Optimum Library is an extension of the Hugging Face Transformers library, providing a framework to integrate third-party libraries from Hardware Partners and interface with their specific functionality."
 optional = true
 python-versions = ">=3.7.0"
 files = [
-    {file = "optimum-1.16.2-py3-none-any.whl", hash = "sha256:29ad8fe53b565646bb06d8779c398e0149d220570654acdf5e3790eb1aef790d"},
-    {file = "optimum-1.16.2.tar.gz", hash = "sha256:0ac278c94b94888ea3b68f6a426c836900621d29db1d176587a584fd43600ba9"},
+    {file = "optimum-1.17.1-py3-none-any.whl", hash = "sha256:508bc55db3c9434f4e8d5a30c39a46ac63c4cdb45bcc5a641b6c1c77cae88d23"},
+    {file = "optimum-1.17.1.tar.gz", hash = "sha256:e59af717e8691b11903fe2cfb8c6efd6f6798b0417f3e70d231e578a02448ceb"},
 ]
 
 [package.dependencies]
@@ -3984,7 +3987,7 @@ transformers = {version = ">=4.26.0", extras = ["sentencepiece"]}
 [package.extras]
 amd = ["optimum-amd"]
 benchmark = ["evaluate (>=0.2.0)", "optuna", "scikit-learn", "seqeval", "torchvision", "tqdm"]
-dev = ["Pillow", "accelerate", "black (>=23.1,<24.0)", "diffusers (>=0.17.0)", "einops", "invisible-watermark", "parameterized", "pytest", "pytest-xdist", "requests", "ruff (==0.1.5)", "sacremoses", "torchaudio", "torchvision"]
+dev = ["Pillow", "accelerate", "black (>=23.1,<24.0)", "diffusers (>=0.17.0)", "einops", "invisible-watermark", "parameterized", "pytest", "pytest-xdist", "requests", "rjieba", "ruff (==0.1.5)", "sacremoses", "scikit-learn", "timm", "torchaudio", "torchvision"]
 diffusers = ["diffusers"]
 doc-build = ["accelerate"]
 exporters = ["onnx", "onnxruntime", "timm"]
@@ -3992,17 +3995,17 @@ exporters-gpu = ["onnx", "onnxruntime-gpu", "timm"]
 exporters-tf = ["h5py", "numpy (<1.24.0)", "onnx", "onnxruntime", "tensorflow (>=2.4,<=2.12.1)", "tf2onnx", "timm"]
 furiosa = ["optimum-furiosa"]
 graphcore = ["optimum-graphcore"]
-habana = ["optimum-habana", "transformers (>=4.33.0,<4.35.0)"]
-intel = ["optimum-intel (>=1.12.0)"]
-neural-compressor = ["optimum-intel[neural-compressor] (>=1.12.0)"]
+habana = ["optimum-habana", "transformers (>=4.37.0,<4.38.0)"]
+intel = ["optimum-intel (>=1.15.0)"]
+neural-compressor = ["optimum-intel[neural-compressor] (>=1.15.0)"]
 neuron = ["optimum-neuron[neuron]"]
 neuronx = ["optimum-neuron[neuronx]"]
-nncf = ["optimum-intel[nncf] (>=1.12.0)"]
+nncf = ["optimum-intel[nncf] (>=1.15.0)"]
 onnxruntime = ["datasets (>=1.2.1)", "evaluate", "onnx", "onnxruntime (>=1.11.0)", "protobuf (>=3.20.1)"]
 onnxruntime-gpu = ["accelerate", "datasets (>=1.2.1)", "evaluate", "onnx", "onnxruntime-gpu (>=1.11.0)", "protobuf (>=3.20.1)"]
-openvino = ["optimum-intel[openvino] (>=1.12.0)"]
+openvino = ["optimum-intel[openvino] (>=1.15.0)"]
 quality = ["black (>=23.1,<24.0)", "ruff (==0.1.5)"]
-tests = ["Pillow", "accelerate", "diffusers (>=0.17.0)", "einops", "invisible-watermark", "parameterized", "pytest", "pytest-xdist", "requests", "sacremoses", "torchaudio", "torchvision"]
+tests = ["Pillow", "accelerate", "diffusers (>=0.17.0)", "einops", "invisible-watermark", "parameterized", "pytest", "pytest-xdist", "requests", "rjieba", "sacremoses", "scikit-learn", "timm", "torchaudio", "torchvision"]
 
 [[package]]
 name = "orjson"
@@ -4205,12 +4208,12 @@ ptyprocess = ">=0.5"
 
 [[package]]
 name = "pgvector"
-version = "0.1.8"
+version = "0.2.5"
 description = "pgvector support for Python"
 optional = true
-python-versions = ">=3.6"
+python-versions = ">=3.8"
 files = [
-    {file = "pgvector-0.1.8-py2.py3-none-any.whl", hash = "sha256:99dce3a6580ef73863edb9b8441937671f4e1a09383826e6b0838176cd441a96"},
+    {file = "pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b"},
 ]
 
 [package.dependencies]
@@ -4629,52 +4632,63 @@ tests = ["pytest"]
 
 [[package]]
 name = "pyarrow"
-version = "15.0.0"
+version = "15.0.1"
 description = "Python library for Apache Arrow"
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"},
-    {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"},
-    {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"},
-    {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"},
-    {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"},
-    {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"},
-    {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"},
-    {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"},
-    {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"},
-    {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"},
-    {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"},
-    {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"},
-    {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"},
-    {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"},
-    {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"},
-    {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"},
-    {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"},
-    {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"},
-    {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"},
-    {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"},
-    {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"},
-    {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"},
-    {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"},
-    {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"},
-    {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"},
-    {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"},
-    {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"},
-    {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"},
-    {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"},
-    {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"},
-    {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"},
-    {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"},
-    {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"},
-    {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"},
-    {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"},
-    {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"},
+    {file = "pyarrow-15.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c2ddb3be5ea938c329a84171694fc230b241ce1b6b0ff1a0280509af51c375fa"},
+    {file = "pyarrow-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7543ea88a0ff72f8e6baaf9bfdbec2c62aeabdbede9e4a571c71cc3bc43b6302"},
+    {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1519e218a6941fc074e4501088d891afcb2adf77c236e03c34babcf3d6a0d1c7"},
+    {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28cafa86e1944761970d3b3fc0411b14ff9b5c2b73cd22aaf470d7a3976335f5"},
+    {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:be5c3d463e33d03eab496e1af7916b1d44001c08f0f458ad27dc16093a020638"},
+    {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:47b1eda15d3aa3f49a07b1808648e1397e5dc6a80a30bf87faa8e2d02dad7ac3"},
+    {file = "pyarrow-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e524a31be7db22deebbbcf242b189063ab9a7652c62471d296b31bc6e3cae77b"},
+    {file = "pyarrow-15.0.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a476fefe8bdd56122fb0d4881b785413e025858803cc1302d0d788d3522b374d"},
+    {file = "pyarrow-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:309e6191be385f2e220586bfdb643f9bb21d7e1bc6dd0a6963dc538e347b2431"},
+    {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83bc586903dbeb4365cbc72b602f99f70b96c5882e5dfac5278813c7d624ca3c"},
+    {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07e652daac6d8b05280cd2af31c0fb61a4490ec6a53dc01588014d9fa3fdbee9"},
+    {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:abad2e08652df153a72177ce20c897d083b0c4ebeec051239e2654ddf4d3c996"},
+    {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cde663352bc83ad75ba7b3206e049ca1a69809223942362a8649e37bd22f9e3b"},
+    {file = "pyarrow-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:1b6e237dd7a08482a8b8f3f6512d258d2460f182931832a8c6ef3953203d31e1"},
+    {file = "pyarrow-15.0.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:7bd167536ee23192760b8c731d39b7cfd37914c27fd4582335ffd08450ff799d"},
+    {file = "pyarrow-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c08bb31eb2984ba5c3747d375bb522e7e536b8b25b149c9cb5e1c49b0ccb736"},
+    {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f9c1d630ed2524bd1ddf28ec92780a7b599fd54704cd653519f7ff5aec177a"},
+    {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5186048493395220550bca7b524420471aac2d77af831f584ce132680f55c3df"},
+    {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:31dc30c7ec8958da3a3d9f31d6c3630429b2091ede0ecd0d989fd6bec129f0e4"},
+    {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3f111a014fb8ac2297b43a74bf4495cc479a332908f7ee49cb7cbd50714cb0c1"},
+    {file = "pyarrow-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a6d1f7c15d7f68f08490d0cb34611497c74285b8a6bbeab4ef3fc20117310983"},
+    {file = "pyarrow-15.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:9ad931b996f51c2f978ed517b55cb3c6078272fb4ec579e3da5a8c14873b698d"},
+    {file = "pyarrow-15.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:738f6b53ab1c2f66b2bde8a1d77e186aeaab702d849e0dfa1158c9e2c030add3"},
+    {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c1c3fc16bc74e33bf8f1e5a212938ed8d88e902f372c4dac6b5bad328567d2f"},
+    {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1fa92512128f6c1b8dde0468c1454dd70f3bff623970e370d52efd4d24fd0be"},
+    {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b4157f307c202cbbdac147d9b07447a281fa8e63494f7fc85081da351ec6ace9"},
+    {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:b75e7da26f383787f80ad76143b44844ffa28648fcc7099a83df1538c078d2f2"},
+    {file = "pyarrow-15.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:3a99eac76ae14096c209850935057b9e8ce97a78397c5cde8724674774f34e5d"},
+    {file = "pyarrow-15.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:dd532d3177e031e9b2d2df19fd003d0cc0520d1747659fcabbd4d9bb87de508c"},
+    {file = "pyarrow-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce8c89848fd37e5313fc2ce601483038ee5566db96ba0808d5883b2e2e55dc53"},
+    {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:862eac5e5f3b6477f7a92b2f27e560e1f4e5e9edfca9ea9da8a7478bb4abd5ce"},
+    {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f0ea3a29cd5cb99bf14c1c4533eceaa00ea8fb580950fb5a89a5c771a994a4e"},
+    {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bb902f780cfd624b2e8fd8501fadab17618fdb548532620ef3d91312aaf0888a"},
+    {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:4f87757f02735a6bb4ad2e1b98279ac45d53b748d5baf52401516413007c6999"},
+    {file = "pyarrow-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:efd3816c7fbfcbd406ac0f69873cebb052effd7cdc153ae5836d1b00845845d7"},
+    {file = "pyarrow-15.0.1.tar.gz", hash = "sha256:21d812548d39d490e0c6928a7c663f37b96bf764034123d4b4ab4530ecc757a9"},
 ]
 
 [package.dependencies]
 numpy = ">=1.16.6,<2"
 
+[[package]]
+name = "pyarrow-hotfix"
+version = "0.6"
+description = ""
+optional = true
+python-versions = ">=3.5"
+files = [
+    {file = "pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178"},
+    {file = "pyarrow_hotfix-0.6.tar.gz", hash = "sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945"},
+]
+
 [[package]]
 name = "pyasn1"
 version = "0.5.1"
@@ -4840,93 +4854,93 @@ testutils = ["gitpython (>3)"]
 
 [[package]]
 name = "pymongo"
-version = "4.6.1"
+version = "4.6.2"
 description = "Python driver for MongoDB <http://www.mongodb.org>"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "pymongo-4.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4344c30025210b9fa80ec257b0e0aab5aa1d5cca91daa70d82ab97b482cc038e"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux1_i686.whl", hash = "sha256:1c5654bb8bb2bdb10e7a0bc3c193dd8b49a960b9eebc4381ff5a2043f4c3c441"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:eaf2f65190c506def2581219572b9c70b8250615dc918b3b7c218361a51ec42e"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:262356ea5fcb13d35fb2ab6009d3927bafb9504ef02339338634fffd8a9f1ae4"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:2dd2f6960ee3c9360bed7fb3c678be0ca2d00f877068556785ec2eb6b73d2414"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:ff925f1cca42e933376d09ddc254598f8c5fcd36efc5cac0118bb36c36217c41"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:3cadf7f4c8e94d8a77874b54a63c80af01f4d48c4b669c8b6867f86a07ba994f"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55dac73316e7e8c2616ba2e6f62b750918e9e0ae0b2053699d66ca27a7790105"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:154b361dcb358ad377d5d40df41ee35f1cc14c8691b50511547c12404f89b5cb"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2940aa20e9cc328e8ddeacea8b9a6f5ddafe0b087fedad928912e787c65b4909"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:010bc9aa90fd06e5cc52c8fac2c2fd4ef1b5f990d9638548dde178005770a5e8"},
-    {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e470fa4bace5f50076c32f4b3cc182b31303b4fefb9b87f990144515d572820b"},
-    {file = "pymongo-4.6.1-cp310-cp310-win32.whl", hash = "sha256:da08ea09eefa6b960c2dd9a68ec47949235485c623621eb1d6c02b46765322ac"},
-    {file = "pymongo-4.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:13d613c866f9f07d51180f9a7da54ef491d130f169e999c27e7633abe8619ec9"},
-    {file = "pymongo-4.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6a0ae7a48a6ef82ceb98a366948874834b86c84e288dbd55600c1abfc3ac1d88"},
-    {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bd94c503271e79917b27c6e77f7c5474da6930b3fb9e70a12e68c2dff386b9a"},
-    {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d4ccac3053b84a09251da8f5350bb684cbbf8c8c01eda6b5418417d0a8ab198"},
-    {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:349093675a2d3759e4fb42b596afffa2b2518c890492563d7905fac503b20daa"},
-    {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88beb444fb438385e53dc9110852910ec2a22f0eab7dd489e827038fdc19ed8d"},
-    {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8e62d06e90f60ea2a3d463ae51401475568b995bafaffd81767d208d84d7bb1"},
-    {file = "pymongo-4.6.1-cp311-cp311-win32.whl", hash = "sha256:5556e306713e2522e460287615d26c0af0fe5ed9d4f431dad35c6624c5d277e9"},
-    {file = "pymongo-4.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:b10d8cda9fc2fcdcfa4a000aa10413a2bf8b575852cd07cb8a595ed09689ca98"},
-    {file = "pymongo-4.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b435b13bb8e36be11b75f7384a34eefe487fe87a6267172964628e2b14ecf0a7"},
-    {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e438417ce1dc5b758742e12661d800482200b042d03512a8f31f6aaa9137ad40"},
-    {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b47ebd89e69fbf33d1c2df79759d7162fc80c7652dacfec136dae1c9b3afac7"},
-    {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbed8cccebe1169d45cedf00461b2842652d476d2897fd1c42cf41b635d88746"},
-    {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30a9e06041fbd7a7590693ec5e407aa8737ad91912a1e70176aff92e5c99d20"},
-    {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8729dbf25eb32ad0dc0b9bd5e6a0d0b7e5c2dc8ec06ad171088e1896b522a74"},
-    {file = "pymongo-4.6.1-cp312-cp312-win32.whl", hash = "sha256:3177f783ae7e08aaf7b2802e0df4e4b13903520e8380915e6337cdc7a6ff01d8"},
-    {file = "pymongo-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:00c199e1c593e2c8b033136d7a08f0c376452bac8a896c923fcd6f419e07bdd2"},
-    {file = "pymongo-4.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6dcc95f4bb9ed793714b43f4f23a7b0c57e4ef47414162297d6f650213512c19"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:13552ca505366df74e3e2f0a4f27c363928f3dff0eef9f281eb81af7f29bc3c5"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:77e0df59b1a4994ad30c6d746992ae887f9756a43fc25dec2db515d94cf0222d"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3a7f02a58a0c2912734105e05dedbee4f7507e6f1bd132ebad520be0b11d46fd"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:026a24a36394dc8930cbcb1d19d5eb35205ef3c838a7e619e04bd170713972e7"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:3b287e814a01deddb59b88549c1e0c87cefacd798d4afc0c8bd6042d1c3d48aa"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:9a710c184ba845afb05a6f876edac8f27783ba70e52d5eaf939f121fc13b2f59"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:30b2c9caf3e55c2e323565d1f3b7e7881ab87db16997dc0cbca7c52885ed2347"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff62ba8ff70f01ab4fe0ae36b2cb0b5d1f42e73dfc81ddf0758cd9f77331ad25"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:547dc5d7f834b1deefda51aedb11a7af9c51c45e689e44e14aa85d44147c7657"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1de3c6faf948f3edd4e738abdb4b76572b4f4fdfc1fed4dad02427e70c5a6219"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2831e05ce0a4df10c4ac5399ef50b9a621f90894c2a4d2945dc5658765514ed"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:144a31391a39a390efce0c5ebcaf4bf112114af4384c90163f402cec5ede476b"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33bb16a07d3cc4e0aea37b242097cd5f7a156312012455c2fa8ca396953b11c4"},
-    {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b7b1a83ce514700276a46af3d9e481ec381f05b64939effc9065afe18456a6b9"},
-    {file = "pymongo-4.6.1-cp37-cp37m-win32.whl", hash = "sha256:3071ec998cc3d7b4944377e5f1217c2c44b811fae16f9a495c7a1ce9b42fb038"},
-    {file = "pymongo-4.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2346450a075625c4d6166b40a013b605a38b6b6168ce2232b192a37fb200d588"},
-    {file = "pymongo-4.6.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:061598cbc6abe2f382ab64c9caa83faa2f4c51256f732cdd890bcc6e63bfb67e"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d483793a384c550c2d12cb794ede294d303b42beff75f3b3081f57196660edaf"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f9756f1d25454ba6a3c2f1ef8b7ddec23e5cdeae3dc3c3377243ae37a383db00"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:1ed23b0e2dac6f84f44c8494fbceefe6eb5c35db5c1099f56ab78fc0d94ab3af"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:3d18a9b9b858ee140c15c5bfcb3e66e47e2a70a03272c2e72adda2482f76a6ad"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:c258dbacfff1224f13576147df16ce3c02024a0d792fd0323ac01bed5d3c545d"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:f7acc03a4f1154ba2643edeb13658d08598fe6e490c3dd96a241b94f09801626"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:76013fef1c9cd1cd00d55efde516c154aa169f2bf059b197c263a255ba8a9ddf"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0e6a6c807fa887a0c51cc24fe7ea51bb9e496fe88f00d7930063372c3664c3"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd1fa413f8b9ba30140de198e4f408ffbba6396864c7554e0867aa7363eb58b2"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d219b4508f71d762368caec1fc180960569766049bbc4d38174f05e8ef2fe5b"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27b81ecf18031998ad7db53b960d1347f8f29e8b7cb5ea7b4394726468e4295e"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56816e43c92c2fa8c11dc2a686f0ca248bea7902f4a067fa6cbc77853b0f041e"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef801027629c5b511cf2ba13b9be29bfee36ae834b2d95d9877818479cdc99ea"},
-    {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d4c2be9760b112b1caf649b4977b81b69893d75aa86caf4f0f398447be871f3c"},
-    {file = "pymongo-4.6.1-cp38-cp38-win32.whl", hash = "sha256:39d77d8bbb392fa443831e6d4ae534237b1f4eee6aa186f0cdb4e334ba89536e"},
-    {file = "pymongo-4.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:4497d49d785482cc1a44a0ddf8830b036a468c088e72a05217f5b60a9e025012"},
-    {file = "pymongo-4.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:69247f7a2835fc0984bbf0892e6022e9a36aec70e187fcfe6cae6a373eb8c4de"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7bb0e9049e81def6829d09558ad12d16d0454c26cabe6efc3658e544460688d9"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6a1810c2cbde714decf40f811d1edc0dae45506eb37298fd9d4247b8801509fe"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2aced6fb2f5261b47d267cb40060b73b6527e64afe54f6497844c9affed5fd0"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:d0355cff58a4ed6d5e5f6b9c3693f52de0784aa0c17119394e2a8e376ce489d4"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:3c74f4725485f0a7a3862cfd374cc1b740cebe4c133e0c1425984bcdcce0f4bb"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:9c79d597fb3a7c93d7c26924db7497eba06d58f88f58e586aa69b2ad89fee0f8"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8ec75f35f62571a43e31e7bd11749d974c1b5cd5ea4a8388725d579263c0fdf6"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e641f931c5cd95b376fd3c59db52770e17bec2bf86ef16cc83b3906c054845"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9aafd036f6f2e5ad109aec92f8dbfcbe76cff16bad683eb6dd18013739c0b3ae"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f2b856518bfcfa316c8dae3d7b412aecacf2e8ba30b149f5eb3b63128d703b9"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec31adc2e988fd7db3ab509954791bbc5a452a03c85e45b804b4bfc31fa221d"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9167e735379ec43d8eafa3fd675bfbb12e2c0464f98960586e9447d2cf2c7a83"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1461199b07903fc1424709efafe379205bf5f738144b1a50a08b0396357b5abf"},
-    {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3094c7d2f820eecabadae76bfec02669567bbdd1730eabce10a5764778564f7b"},
-    {file = "pymongo-4.6.1-cp39-cp39-win32.whl", hash = "sha256:c91ea3915425bd4111cb1b74511cdc56d1d16a683a48bf2a5a96b6a6c0f297f7"},
-    {file = "pymongo-4.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:ef102a67ede70e1721fe27f75073b5314911dbb9bc27cde0a1c402a11531e7bd"},
-    {file = "pymongo-4.6.1.tar.gz", hash = "sha256:31dab1f3e1d0cdd57e8df01b645f52d43cc1b653ed3afd535d2891f4fc4f9712"},
+    {file = "pymongo-4.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7640d176ee5b0afec76a1bda3684995cb731b2af7fcfd7c7ef8dc271c5d689af"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux1_i686.whl", hash = "sha256:4e2129ec8f72806751b621470ac5d26aaa18fae4194796621508fa0e6068278a"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c43205e85cbcbdf03cff62ad8f50426dd9d20134a915cfb626d805bab89a1844"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:91ddf95cedca12f115fbc5f442b841e81197d85aa3cc30b82aee3635a5208af2"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:0fbdbf2fba1b4f5f1522e9f11e21c306e095b59a83340a69e908f8ed9b450070"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:097791d5a8d44e2444e0c8c4d6e14570ac11e22bcb833808885a5db081c3dc2a"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:e0b208ebec3b47ee78a5c836e2e885e8c1e10f8ffd101aaec3d63997a4bdcd04"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1849fd6f1917b4dc5dbf744b2f18e41e0538d08dd8e9ba9efa811c5149d665a3"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa0bbbfbd1f8ebbd5facaa10f9f333b20027b240af012748555148943616fdf3"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4522ad69a4ab0e1b46a8367d62ad3865b8cd54cf77518c157631dac1fdc97584"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397949a9cc85e4a1452f80b7f7f2175d557237177120954eff00bf79553e89d3"},
+    {file = "pymongo-4.6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d511db310f43222bc58d811037b176b4b88dc2b4617478c5ef01fea404f8601"},
+    {file = "pymongo-4.6.2-cp310-cp310-win32.whl", hash = "sha256:991e406db5da4d89fb220a94d8caaf974ffe14ce6b095957bae9273c609784a0"},
+    {file = "pymongo-4.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:94637941fe343000f728e28d3fe04f1f52aec6376b67b85583026ff8dab2a0e0"},
+    {file = "pymongo-4.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:84593447a5c5fe7a59ba86b72c2c89d813fbac71c07757acdf162fbfd5d005b9"},
+    {file = "pymongo-4.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aebddb2ec2128d5fc2fe3aee6319afef8697e0374f8a1fcca3449d6f625e7b4"},
+    {file = "pymongo-4.6.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f706c1a644ed33eaea91df0a8fb687ce572b53eeb4ff9b89270cb0247e5d0e1"},
+    {file = "pymongo-4.6.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18c422e6b08fa370ed9d8670c67e78d01f50d6517cec4522aa8627014dfa38b6"},
+    {file = "pymongo-4.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d002ae456a15b1d790a78bb84f87af21af1cb716a63efb2c446ab6bcbbc48ca"},
+    {file = "pymongo-4.6.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f86ba0c781b497a3c9c886765d7b6402a0e3ae079dd517365044c89cd7abb06"},
+    {file = "pymongo-4.6.2-cp311-cp311-win32.whl", hash = "sha256:ac20dd0c7b42555837c86f5ea46505f35af20a08b9cf5770cd1834288d8bd1b4"},
+    {file = "pymongo-4.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:e78af59fd0eb262c2a5f7c7d7e3b95e8596a75480d31087ca5f02f2d4c6acd19"},
+    {file = "pymongo-4.6.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6125f73503407792c8b3f80165f8ab88a4e448d7d9234c762681a4d0b446fcb4"},
+    {file = "pymongo-4.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba052446a14bd714ec83ca4e77d0d97904f33cd046d7bb60712a6be25eb31dbb"},
+    {file = "pymongo-4.6.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b65433c90e07dc252b4a55dfd885ca0df94b1cf77c5b8709953ec1983aadc03"},
+    {file = "pymongo-4.6.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2160d9c8cd20ce1f76a893f0daf7c0d38af093f36f1b5c9f3dcf3e08f7142814"},
+    {file = "pymongo-4.6.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f251f287e6d42daa3654b686ce1fcb6d74bf13b3907c3ae25954978c70f2cd4"},
+    {file = "pymongo-4.6.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7d227a60b00925dd3aeae4675575af89c661a8e89a1f7d1677e57eba4a3693c"},
+    {file = "pymongo-4.6.2-cp312-cp312-win32.whl", hash = "sha256:311794ef3ccae374aaef95792c36b0e5c06e8d5cf04a1bdb1b2bf14619ac881f"},
+    {file = "pymongo-4.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:f673b64a0884edcc56073bda0b363428dc1bf4eb1b5e7d0b689f7ec6173edad6"},
+    {file = "pymongo-4.6.2-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:fe010154dfa9e428bd2fb3e9325eff2216ab20a69ccbd6b5cac6785ca2989161"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1f5f4cd2969197e25b67e24d5b8aa2452d381861d2791d06c493eaa0b9c9fcfe"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c9519c9d341983f3a1bd19628fecb1d72a48d8666cf344549879f2e63f54463b"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c68bf4a399e37798f1b5aa4f6c02886188ef465f4ac0b305a607b7579413e366"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:a509db602462eb736666989739215b4b7d8f4bb8ac31d0bffd4be9eae96c63ef"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:362a5adf6f3f938a8ff220a4c4aaa93e84ef932a409abecd837c617d17a5990f"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:ee30a9d4c27a88042d0636aca0275788af09cc237ae365cd6ebb34524bddb9cc"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:477914e13501bb1d4608339ee5bb618be056d2d0e7267727623516cfa902e652"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd343ca44982d480f1e39372c48e8e263fc6f32e9af2be456298f146a3db715"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3797e0a628534e07a36544d2bfa69e251a578c6d013e975e9e3ed2ac41f2d95"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97d81d357e1a2a248b3494d52ebc8bf15d223ee89d59ee63becc434e07438a24"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed694c0d1977cb54281cb808bc2b247c17fb64b678a6352d3b77eb678ebe1bd9"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ceaaff4b812ae368cf9774989dea81b9bbb71e5bed666feca6a9f3087c03e49"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7dd63f7c2b3727541f7f37d0fb78d9942eb12a866180fbeb898714420aad74e2"},
+    {file = "pymongo-4.6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e571434633f99a81e081738721bb38e697345281ed2f79c2f290f809ba3fbb2f"},
+    {file = "pymongo-4.6.2-cp37-cp37m-win32.whl", hash = "sha256:3e9f6e2f3da0a6af854a3e959a6962b5f8b43bbb8113cd0bff0421c5059b3106"},
+    {file = "pymongo-4.6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3a5280f496297537301e78bde250c96fadf4945e7b2c397d8bb8921861dd236d"},
+    {file = "pymongo-4.6.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:5f6bcd2d012d82d25191a911a239fd05a8a72e8c5a7d81d056c0f3520cad14d1"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4fa30494601a6271a8b416554bd7cde7b2a848230f0ec03e3f08d84565b4bf8c"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bea62f03a50f363265a7a651b4e2a4429b4f138c1864b2d83d4bf6f9851994be"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b2d445f1cf147331947cc35ec10342f898329f29dd1947a3f8aeaf7e0e6878d1"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:5db133d6ec7a4f7fc7e2bd098e4df23d7ad949f7be47b27b515c9fb9301c61e4"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:9eec7140cf7513aa770ea51505d312000c7416626a828de24318fdcc9ac3214c"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:5379ca6fd325387a34cda440aec2bd031b5ef0b0aa2e23b4981945cff1dab84c"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:579508536113dbd4c56e4738955a18847e8a6c41bf3c0b4ab18b51d81a6b7be8"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3bae553ca39ed52db099d76acd5e8566096064dc7614c34c9359bb239ec4081"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0257e0eebb50f242ca28a92ef195889a6ad03dcdde5bf1c7ab9f38b7e810801"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbafe3a1df21eeadb003c38fc02c1abf567648b6477ec50c4a3c042dca205371"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaecfafb407feb6f562c7f2f5b91f22bfacba6dd739116b1912788cff7124c4a"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e942945e9112075a84d2e2d6e0d0c98833cdcdfe48eb8952b917f996025c7ffa"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f7b98f8d2cf3eeebde738d080ae9b4276d7250912d9751046a9ac1efc9b1ce2"},
+    {file = "pymongo-4.6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8110b78fc4b37dced85081d56795ecbee6a7937966e918e05e33a3900e8ea07d"},
+    {file = "pymongo-4.6.2-cp38-cp38-win32.whl", hash = "sha256:df813f0c2c02281720ccce225edf39dc37855bf72cdfde6f789a1d1cf32ffb4b"},
+    {file = "pymongo-4.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:64ec3e2dcab9af61bdbfcb1dd863c70d1b0c220b8e8ac11df8b57f80ee0402b3"},
+    {file = "pymongo-4.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bff601fbfcecd2166d9a2b70777c2985cb9689e2befb3278d91f7f93a0456cae"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f1febca6f79e91feafc572906871805bd9c271b6a2d98a8bb5499b6ace0befed"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d788cb5cc947d78934be26eef1623c78cec3729dc93a30c23f049b361aa6d835"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5c2f258489de12a65b81e1b803a531ee8cf633fa416ae84de65cd5f82d2ceb37"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:fb24abcd50501b25d33a074c1790a1389b6460d2509e4b240d03fd2e5c79f463"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:4d982c6db1da7cf3018183891883660ad085de97f21490d314385373f775915b"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:b2dd8c874927a27995f64a3b44c890e8a944c98dec1ba79eab50e07f1e3f801b"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:4993593de44c741d1e9f230f221fe623179f500765f9855936e4ff6f33571bad"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:658f6c028edaeb02761ebcaca8d44d519c22594b2a51dcbc9bd2432aa93319e3"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68109c13176749fbbbbbdb94dd4a58dcc604db6ea43ee300b2602154aebdd55f"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:707d28a822b918acf941cff590affaddb42a5d640614d71367c8956623a80cbc"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f251db26c239aec2a4d57fbe869e0a27b7f6b5384ec6bf54aeb4a6a5e7408234"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57c05f2e310701fc17ae358caafd99b1830014e316f0242d13ab6c01db0ab1c2"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b575fbe6396bbf21e4d0e5fd2e3cdb656dc90c930b6c5532192e9a89814f72d"},
+    {file = "pymongo-4.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ca5877754f3fa6e4fe5aacf5c404575f04c2d9efc8d22ed39576ed9098d555c8"},
+    {file = "pymongo-4.6.2-cp39-cp39-win32.whl", hash = "sha256:8caa73fb19070008e851a589b744aaa38edd1366e2487284c61158c77fdf72af"},
+    {file = "pymongo-4.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:3e03c732cb64b96849310e1d8688fb70d75e2571385485bf2f1e7ad1d309fa53"},
+    {file = "pymongo-4.6.2.tar.gz", hash = "sha256:ab7d01ac832a1663dad592ccbd92bb0f0775bc8f98a1923c5e1a7d7fead495af"},
 ]
 
 [package.dependencies]
@@ -4943,13 +4957,13 @@ zstd = ["zstandard"]
 
 [[package]]
 name = "pyparsing"
-version = "3.1.1"
+version = "3.1.2"
 description = "pyparsing module - Classes and methods to define and execute parsing grammars"
 optional = true
 python-versions = ">=3.6.8"
 files = [
-    {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"},
-    {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"},
+    {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
+    {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"},
 ]
 
 [package.extras]
@@ -4957,13 +4971,13 @@ diagrams = ["jinja2", "railroad-diagrams"]
 
 [[package]]
 name = "pypdf"
-version = "4.0.1"
+version = "4.1.0"
 description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "pypdf-4.0.1-py3-none-any.whl", hash = "sha256:fe7c313c7e8074a516eae9d9df0111b7b9769f7a210479af7a342d27270ef81a"},
-    {file = "pypdf-4.0.1.tar.gz", hash = "sha256:871badcfe335dd68b6b563aa7646288c6b86f9ceecffb21e86341261d65d8173"},
+    {file = "pypdf-4.1.0-py3-none-any.whl", hash = "sha256:16cac912a05200099cef3f347c4c7e0aaf0a6d027603b8f9a973c0ea500dff89"},
+    {file = "pypdf-4.1.0.tar.gz", hash = "sha256:01c3257ec908676efd60a4537e525b89d48e0852bc92b4e0aa4cc646feda17cc"},
 ]
 
 [package.dependencies]
@@ -5062,13 +5076,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"]
 
 [[package]]
 name = "python-dateutil"
-version = "2.8.2"
+version = "2.9.0.post0"
 description = "Extensions to the standard Python datetime module"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
 files = [
-    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
-    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
 ]
 
 [package.dependencies]
@@ -5135,17 +5149,17 @@ files = [
 
 [[package]]
 name = "pywinpty"
-version = "2.0.12"
+version = "2.0.13"
 description = "Pseudo terminal support for Windows from Python."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"},
-    {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"},
-    {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"},
-    {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"},
-    {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"},
-    {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"},
+    {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"},
+    {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"},
+    {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"},
+    {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"},
+    {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"},
+    {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"},
 ]
 
 [[package]]
@@ -6021,19 +6035,19 @@ files = [
 
 [[package]]
 name = "setuptools"
-version = "69.1.0"
+version = "69.1.1"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"},
-    {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"},
+    {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"},
+    {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"},
 ]
 
 [package.extras]
 docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
 
 [[package]]
 name = "six"
@@ -6069,13 +6083,13 @@ webhdfs = ["requests"]
 
 [[package]]
 name = "sniffio"
-version = "1.3.0"
+version = "1.3.1"
 description = "Sniff out which async library your code is running under"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
-    {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
+    {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+    {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
 ]
 
 [[package]]
@@ -6457,60 +6471,60 @@ test = ["pytest"]
 
 [[package]]
 name = "sqlalchemy"
-version = "2.0.27"
+version = "2.0.28"
 description = "Database Abstraction Library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"},
-    {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"},
-    {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"},
-    {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"},
-    {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"},
-    {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"},
-    {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"},
-    {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"},
-    {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"},
-    {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"},
-    {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"},
-    {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"},
-    {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"},
-    {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"},
-    {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"},
-    {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"},
-    {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"},
-    {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"},
-    {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"},
-    {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"},
-    {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"},
-    {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"},
-    {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"},
-    {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"},
-    {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"},
-    {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"},
-    {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"},
-    {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"},
-    {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"},
-    {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"},
-    {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"},
-    {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"},
-    {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"},
-    {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"},
-    {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"},
-    {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"},
-    {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"},
-    {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"},
-    {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"},
-    {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"},
-    {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"},
-    {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"},
-    {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"},
-    {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"},
-    {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"},
-    {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"},
-    {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"},
-    {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"},
-    {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"},
+    {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0b148ab0438f72ad21cb004ce3bdaafd28465c4276af66df3b9ecd2037bf252"},
+    {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbda76961eb8f27e6ad3c84d1dc56d5bc61ba8f02bd20fcf3450bd421c2fcc9c"},
+    {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feea693c452d85ea0015ebe3bb9cd15b6f49acc1a31c28b3c50f4db0f8fb1e71"},
+    {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da98815f82dce0cb31fd1e873a0cb30934971d15b74e0d78cf21f9e1b05953f"},
+    {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a5adf383c73f2d49ad15ff363a8748319ff84c371eed59ffd0127355d6ea1da"},
+    {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56856b871146bfead25fbcaed098269d90b744eea5cb32a952df00d542cdd368"},
+    {file = "SQLAlchemy-2.0.28-cp310-cp310-win32.whl", hash = "sha256:943aa74a11f5806ab68278284a4ddd282d3fb348a0e96db9b42cb81bf731acdc"},
+    {file = "SQLAlchemy-2.0.28-cp310-cp310-win_amd64.whl", hash = "sha256:c6c4da4843e0dabde41b8f2e8147438330924114f541949e6318358a56d1875a"},
+    {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46a3d4e7a472bfff2d28db838669fc437964e8af8df8ee1e4548e92710929adc"},
+    {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3dd67b5d69794cfe82862c002512683b3db038b99002171f624712fa71aeaa"},
+    {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61e2e41656a673b777e2f0cbbe545323dbe0d32312f590b1bc09da1de6c2a02"},
+    {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0315d9125a38026227f559488fe7f7cee1bd2fbc19f9fd637739dc50bb6380b2"},
+    {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af8ce2d31679006e7b747d30a89cd3ac1ec304c3d4c20973f0f4ad58e2d1c4c9"},
+    {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:81ba314a08c7ab701e621b7ad079c0c933c58cdef88593c59b90b996e8b58fa5"},
+    {file = "SQLAlchemy-2.0.28-cp311-cp311-win32.whl", hash = "sha256:1ee8bd6d68578e517943f5ebff3afbd93fc65f7ef8f23becab9fa8fb315afb1d"},
+    {file = "SQLAlchemy-2.0.28-cp311-cp311-win_amd64.whl", hash = "sha256:ad7acbe95bac70e4e687a4dc9ae3f7a2f467aa6597049eeb6d4a662ecd990bb6"},
+    {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3499008ddec83127ab286c6f6ec82a34f39c9817f020f75eca96155f9765097"},
+    {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b66fcd38659cab5d29e8de5409cdf91e9986817703e1078b2fdaad731ea66f5"},
+    {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea30da1e76cb1acc5b72e204a920a3a7678d9d52f688f087dc08e54e2754c67"},
+    {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:124202b4e0edea7f08a4db8c81cc7859012f90a0d14ba2bf07c099aff6e96462"},
+    {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e23b88c69497a6322b5796c0781400692eca1ae5532821b39ce81a48c395aae9"},
+    {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b6303bfd78fb3221847723104d152e5972c22367ff66edf09120fcde5ddc2e2"},
+    {file = "SQLAlchemy-2.0.28-cp312-cp312-win32.whl", hash = "sha256:a921002be69ac3ab2cf0c3017c4e6a3377f800f1fca7f254c13b5f1a2f10022c"},
+    {file = "SQLAlchemy-2.0.28-cp312-cp312-win_amd64.whl", hash = "sha256:b4a2cf92995635b64876dc141af0ef089c6eea7e05898d8d8865e71a326c0385"},
+    {file = "SQLAlchemy-2.0.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e91b5e341f8c7f1e5020db8e5602f3ed045a29f8e27f7f565e0bdee3338f2c7"},
+    {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c7b78dfc7278329f27be02c44abc0d69fe235495bb8e16ec7ef1b1a17952db"},
+    {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eba73ef2c30695cb7eabcdb33bb3d0b878595737479e152468f3ba97a9c22a4"},
+    {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5df5d1dafb8eee89384fb7a1f79128118bc0ba50ce0db27a40750f6f91aa99d5"},
+    {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2858bbab1681ee5406650202950dc8f00e83b06a198741b7c656e63818633526"},
+    {file = "SQLAlchemy-2.0.28-cp37-cp37m-win32.whl", hash = "sha256:9461802f2e965de5cff80c5a13bc945abea7edaa1d29360b485c3d2b56cdb075"},
+    {file = "SQLAlchemy-2.0.28-cp37-cp37m-win_amd64.whl", hash = "sha256:a6bec1c010a6d65b3ed88c863d56b9ea5eeefdf62b5e39cafd08c65f5ce5198b"},
+    {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:843a882cadebecc655a68bd9a5b8aa39b3c52f4a9a5572a3036fb1bb2ccdc197"},
+    {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dbb990612c36163c6072723523d2be7c3eb1517bbdd63fe50449f56afafd1133"},
+    {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7e4baf9161d076b9a7e432fce06217b9bd90cfb8f1d543d6e8c4595627edb9"},
+    {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0a5354cb4de9b64bccb6ea33162cb83e03dbefa0d892db88a672f5aad638a75"},
+    {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fffcc8edc508801ed2e6a4e7b0d150a62196fd28b4e16ab9f65192e8186102b6"},
+    {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca7b6d99a4541b2ebab4494f6c8c2f947e0df4ac859ced575238e1d6ca5716b"},
+    {file = "SQLAlchemy-2.0.28-cp38-cp38-win32.whl", hash = "sha256:8c7f10720fc34d14abad5b647bc8202202f4948498927d9f1b4df0fb1cf391b7"},
+    {file = "SQLAlchemy-2.0.28-cp38-cp38-win_amd64.whl", hash = "sha256:243feb6882b06a2af68ecf4bec8813d99452a1b62ba2be917ce6283852cf701b"},
+    {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc4974d3684f28b61b9a90fcb4c41fb340fd4b6a50c04365704a4da5a9603b05"},
+    {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87724e7ed2a936fdda2c05dbd99d395c91ea3c96f029a033a4a20e008dd876bf"},
+    {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68722e6a550f5de2e3cfe9da6afb9a7dd15ef7032afa5651b0f0c6b3adb8815d"},
+    {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328529f7c7f90adcd65aed06a161851f83f475c2f664a898af574893f55d9e53"},
+    {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:df40c16a7e8be7413b885c9bf900d402918cc848be08a59b022478804ea076b8"},
+    {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:426f2fa71331a64f5132369ede5171c52fd1df1bd9727ce621f38b5b24f48750"},
+    {file = "SQLAlchemy-2.0.28-cp39-cp39-win32.whl", hash = "sha256:33157920b233bc542ce497a81a2e1452e685a11834c5763933b440fedd1d8e2d"},
+    {file = "SQLAlchemy-2.0.28-cp39-cp39-win_amd64.whl", hash = "sha256:2f60843068e432311c886c5f03c4664acaef507cf716f6c60d5fde7265be9d7b"},
+    {file = "SQLAlchemy-2.0.28-py3-none-any.whl", hash = "sha256:78bb7e8da0183a8301352d569900d9d3594c48ac21dc1c2ec6b3121ed8b6c986"},
+    {file = "SQLAlchemy-2.0.28.tar.gz", hash = "sha256:dd53b6c4e6d960600fd6532b79ee28e2da489322fcf6648738134587faf767b6"},
 ]
 
 [package.dependencies]
@@ -6987,47 +7001,47 @@ files = [
 
 [[package]]
 name = "tomlkit"
-version = "0.12.3"
+version = "0.12.4"
 description = "Style preserving TOML library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"},
-    {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"},
+    {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"},
+    {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"},
 ]
 
 [[package]]
 name = "torch"
-version = "2.2.0"
+version = "2.2.1"
 description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
 optional = true
 python-versions = ">=3.8.0"
 files = [
-    {file = "torch-2.2.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:d366158d6503a3447e67f8c0ad1328d54e6c181d88572d688a625fac61b13a97"},
-    {file = "torch-2.2.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:707f2f80402981e9f90d0038d7d481678586251e6642a7a6ef67fc93511cb446"},
-    {file = "torch-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:15c8f0a105c66b28496092fca1520346082e734095f8eaf47b5786bac24b8a31"},
-    {file = "torch-2.2.0-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:0ca4df4b728515ad009b79f5107b00bcb2c63dc202d991412b9eb3b6a4f24349"},
-    {file = "torch-2.2.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:3d3eea2d5969b9a1c9401429ca79efc668120314d443d3463edc3289d7f003c7"},
-    {file = "torch-2.2.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0d1c580e379c0d48f0f0a08ea28d8e373295aa254de4f9ad0631f9ed8bc04c24"},
-    {file = "torch-2.2.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9328e3c1ce628a281d2707526b4d1080eae7c4afab4f81cea75bde1f9441dc78"},
-    {file = "torch-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:03c8e660907ac1b8ee07f6d929c4e15cd95be2fb764368799cca02c725a212b8"},
-    {file = "torch-2.2.0-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:da0cefe7f84ece3e3b56c11c773b59d1cb2c0fd83ddf6b5f7f1fd1a987b15c3e"},
-    {file = "torch-2.2.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:f81d23227034221a4a4ff8ef24cc6cec7901edd98d9e64e32822778ff01be85e"},
-    {file = "torch-2.2.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:dcbfb2192ac41ca93c756ebe9e2af29df0a4c14ee0e7a0dd78f82c67a63d91d4"},
-    {file = "torch-2.2.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:9eeb42971619e24392c9088b5b6d387d896e267889d41d267b1fec334f5227c5"},
-    {file = "torch-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:c718b2ca69a6cac28baa36d86d8c0ec708b102cebd1ceb1b6488e404cd9be1d1"},
-    {file = "torch-2.2.0-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:f11d18fceb4f9ecb1ac680dde7c463c120ed29056225d75469c19637e9f98d12"},
-    {file = "torch-2.2.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:ee1da852bfd4a7e674135a446d6074c2da7194c1b08549e31eae0b3138c6b4d2"},
-    {file = "torch-2.2.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0d819399819d0862268ac531cf12a501c253007df4f9e6709ede8a0148f1a7b8"},
-    {file = "torch-2.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:08f53ccc38c49d839bc703ea1b20769cc8a429e0c4b20b56921a9f64949bf325"},
-    {file = "torch-2.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:93bffe3779965a71dab25fc29787538c37c5d54298fd2f2369e372b6fb137d41"},
-    {file = "torch-2.2.0-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:c17ec323da778efe8dad49d8fb534381479ca37af1bfc58efdbb8607a9d263a3"},
-    {file = "torch-2.2.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:c02685118008834e878f676f81eab3a952b7936fa31f474ef8a5ff4b5c78b36d"},
-    {file = "torch-2.2.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d9f39d6f53cec240a0e3baa82cb697593340f9d4554cee6d3d6ca07925c2fac0"},
-    {file = "torch-2.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:51770c065206250dc1222ea7c0eff3f88ab317d3e931cca2aee461b85fbc2472"},
-    {file = "torch-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:008e4c6ad703de55af760c73bf937ecdd61a109f9b08f2bbb9c17e7c7017f194"},
-    {file = "torch-2.2.0-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:de8680472dd14e316f42ceef2a18a301461a9058cd6e99a1f1b20f78f11412f1"},
-    {file = "torch-2.2.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:99e1dcecb488e3fd25bcaac56e48cdb3539842904bdc8588b0b255fde03a254c"},
+    {file = "torch-2.2.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:8d3bad336dd2c93c6bcb3268e8e9876185bda50ebde325ef211fb565c7d15273"},
+    {file = "torch-2.2.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:5297f13370fdaca05959134b26a06a7f232ae254bf2e11a50eddec62525c9006"},
+    {file = "torch-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:5f5dee8433798888ca1415055f5e3faf28a3bad660e4c29e1014acd3275ab11a"},
+    {file = "torch-2.2.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:b6d78338acabf1fb2e88bf4559d837d30230cf9c3e4337261f4d83200df1fcbe"},
+    {file = "torch-2.2.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:6ab3ea2e29d1aac962e905142bbe50943758f55292f1b4fdfb6f4792aae3323e"},
+    {file = "torch-2.2.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:d86664ec85902967d902e78272e97d1aff1d331f7619d398d3ffab1c9b8e9157"},
+    {file = "torch-2.2.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d6227060f268894f92c61af0a44c0d8212e19cb98d05c20141c73312d923bc0a"},
+    {file = "torch-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:77e990af75fb1675490deb374d36e726f84732cd5677d16f19124934b2409ce9"},
+    {file = "torch-2.2.1-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:46085e328d9b738c261f470231e987930f4cc9472d9ffb7087c7a1343826ac51"},
+    {file = "torch-2.2.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:2d9e7e5ecbb002257cf98fae13003abbd620196c35f85c9e34c2adfb961321ec"},
+    {file = "torch-2.2.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:ada53aebede1c89570e56861b08d12ba4518a1f8b82d467c32665ec4d1f4b3c8"},
+    {file = "torch-2.2.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:be21d4c41ecebed9e99430dac87de1439a8c7882faf23bba7fea3fea7b906ac1"},
+    {file = "torch-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:79848f46196750367dcdf1d2132b722180b9d889571e14d579ae82d2f50596c5"},
+    {file = "torch-2.2.1-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:7ee804847be6be0032fbd2d1e6742fea2814c92bebccb177f0d3b8e92b2d2b18"},
+    {file = "torch-2.2.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:84b2fb322ab091039fdfe74e17442ff046b258eb5e513a28093152c5b07325a7"},
+    {file = "torch-2.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5c0c83aa7d94569997f1f474595e808072d80b04d34912ce6f1a0e1c24b0c12a"},
+    {file = "torch-2.2.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:91a1b598055ba06b2c386415d2e7f6ac818545e94c5def597a74754940188513"},
+    {file = "torch-2.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:8f93ddf3001ecec16568390b507652644a3a103baa72de3ad3b9c530e3277098"},
+    {file = "torch-2.2.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:0e8bdd4c77ac2584f33ee14c6cd3b12767b4da508ec4eed109520be7212d1069"},
+    {file = "torch-2.2.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:6a21bcd7076677c97ca7db7506d683e4e9db137e8420eb4a68fb67c3668232a7"},
+    {file = "torch-2.2.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f1b90ac61f862634039265cd0f746cc9879feee03ff962c803486301b778714b"},
+    {file = "torch-2.2.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ed9e29eb94cd493b36bca9cb0b1fd7f06a0688215ad1e4b3ab4931726e0ec092"},
+    {file = "torch-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:c47bc25744c743f3835831a20efdcfd60aeb7c3f9804a213f61e45803d16c2a5"},
+    {file = "torch-2.2.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:0952549bcb43448c8d860d5e3e947dd18cbab491b14638e21750cb3090d5ad3e"},
+    {file = "torch-2.2.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:26bd2272ec46fc62dcf7d24b2fb284d44fcb7be9d529ebf336b9860350d674ed"},
 ]
 
 [package.dependencies]
@@ -7047,7 +7061,7 @@ nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"
 nvidia-nccl-cu12 = {version = "2.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
 nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
 sympy = "*"
-triton = {version = "2.2.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+triton = {version = "2.2.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""}
 typing-extensions = ">=4.8.0"
 
 [package.extras]
@@ -7111,13 +7125,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,
 
 [[package]]
 name = "transformers"
-version = "4.37.2"
+version = "4.38.2"
 description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow"
 optional = true
 python-versions = ">=3.8.0"
 files = [
-    {file = "transformers-4.37.2-py3-none-any.whl", hash = "sha256:595a8b12a1fcc4ad0ced49ce206c58e17be68c85d7aee3d7546d04a32c910d2e"},
-    {file = "transformers-4.37.2.tar.gz", hash = "sha256:f307082ae5d528b8480611a4879a4a11651012d0e9aaea3f6cf17219ffd95542"},
+    {file = "transformers-4.38.2-py3-none-any.whl", hash = "sha256:c4029cb9f01b3dd335e52f364c52d2b37c65b4c78e02e6a08b1919c5c928573e"},
+    {file = "transformers-4.38.2.tar.gz", hash = "sha256:c5fc7ad682b8a50a48b2a4c05d4ea2de5567adb1bdd00053619dbe5960857dd5"},
 ]
 
 [package.dependencies]
@@ -7133,21 +7147,21 @@ requests = "*"
 safetensors = ">=0.4.1"
 sentencepiece = {version = ">=0.1.91,<0.1.92 || >0.1.92", optional = true, markers = "extra == \"sentencepiece\""}
 tokenizers = ">=0.14,<0.19"
-torch = {version = ">=1.11,<1.12.0 || >1.12.0", optional = true, markers = "extra == \"torch\""}
+torch = {version = "*", optional = true, markers = "extra == \"torch\""}
 tqdm = ">=4.27"
 
 [package.extras]
 accelerate = ["accelerate (>=0.21.0)"]
-agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.11,!=1.12.0)"]
-all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.11,!=1.12.0)", "torchaudio", "torchvision"]
+agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"]
+all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"]
 audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
 codecarbon = ["codecarbon (==1.2.0)"]
 deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"]
-deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"]
-dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.11,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
-dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"]
-dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.11,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
-docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.11,!=1.12.0)", "torchaudio", "torchvision"]
+deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"]
+dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
+dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"]
+dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
+docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"]
 docs-specific = ["hf-doc-builder"]
 flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"]
 flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
@@ -7164,20 +7178,20 @@ ray = ["ray[tune] (>=2.7.0)"]
 retrieval = ["datasets (!=2.5.0)", "faiss-cpu"]
 sagemaker = ["sagemaker (>=2.31.0)"]
 sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"]
-serving = ["fastapi", "pydantic (<2)", "starlette", "uvicorn"]
+serving = ["fastapi", "pydantic", "starlette", "uvicorn"]
 sigopt = ["sigopt"]
 sklearn = ["scikit-learn"]
 speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
-testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"]
+testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"]
 tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"]
 tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"]
 tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
 timm = ["timm"]
 tokenizers = ["tokenizers (>=0.14,<0.19)"]
-torch = ["accelerate (>=0.21.0)", "torch (>=1.11,!=1.12.0)"]
+torch = ["accelerate (>=0.21.0)", "torch"]
 torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
 torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"]
-torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.19)", "torch (>=1.11,!=1.12.0)", "tqdm (>=4.27)"]
+torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.19)", "torch", "tqdm (>=4.27)"]
 video = ["av (==9.2.0)", "decord (==0.6.0)"]
 vision = ["Pillow (>=10.0.1,<=15.0)"]
 
@@ -7395,46 +7409,46 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.
 
 [[package]]
 name = "types-deprecated"
-version = "1.2.9.20240106"
+version = "1.2.9.20240311"
 description = "Typing stubs for Deprecated"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "types-Deprecated-1.2.9.20240106.tar.gz", hash = "sha256:afeb819e9a03d0a5795f18c88fe6207c48ed13c639e93281bd9d9b7bb6d34310"},
-    {file = "types_Deprecated-1.2.9.20240106-py3-none-any.whl", hash = "sha256:9dcb258493b5be407574ee21e50ddac9e429072d39b576126bf1ac00764fb9a8"},
+    {file = "types-Deprecated-1.2.9.20240311.tar.gz", hash = "sha256:0680e89989a8142707de8103f15d182445a533c1047fd9b7e8c5459101e9b90a"},
+    {file = "types_Deprecated-1.2.9.20240311-py3-none-any.whl", hash = "sha256:d7793aaf32ff8f7e49a8ac781de4872248e0694c4b75a7a8a186c51167463f9d"},
 ]
 
 [[package]]
 name = "types-docutils"
-version = "0.20.0.20240201"
+version = "0.20.0.20240311"
 description = "Typing stubs for docutils"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "types-docutils-0.20.0.20240201.tar.gz", hash = "sha256:ba4bfd4ff6dd19640ba7ab5d93900393a65897880f3650997964a943f4e79a6b"},
-    {file = "types_docutils-0.20.0.20240201-py3-none-any.whl", hash = "sha256:79d3bcef235f7c81a63f4f3dcf1d0b138985079bb32d02f5a7d266e1f9f361ba"},
+    {file = "types-docutils-0.20.0.20240311.tar.gz", hash = "sha256:b29df362e9e6efcba68e27af78135902d56614f6fa76c073d29989272eedb069"},
+    {file = "types_docutils-0.20.0.20240311-py3-none-any.whl", hash = "sha256:7d6ade724d89c7cbec390da4c63104f46d3f4e3eff0804998b38a274aa1e3d0c"},
 ]
 
 [[package]]
 name = "types-protobuf"
-version = "4.24.0.20240129"
+version = "4.24.0.20240311"
 description = "Typing stubs for protobuf"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "types-protobuf-4.24.0.20240129.tar.gz", hash = "sha256:8a83dd3b9b76a33e08d8636c5daa212ace1396418ed91837635fcd564a624891"},
-    {file = "types_protobuf-4.24.0.20240129-py3-none-any.whl", hash = "sha256:23be68cc29f3f5213b5c5878ac0151706182874040e220cfb11336f9ee642ead"},
+    {file = "types-protobuf-4.24.0.20240311.tar.gz", hash = "sha256:c80426f9fb9b21aee514691e96ab32a5cd694a82e2ac07964b352c3e7e0182bc"},
+    {file = "types_protobuf-4.24.0.20240311-py3-none-any.whl", hash = "sha256:8e039486df058141cb221ab99f88c5878c08cca4376db1d84f63279860aa09cd"},
 ]
 
 [[package]]
 name = "types-pyopenssl"
-version = "24.0.0.20240130"
+version = "24.0.0.20240311"
 description = "Typing stubs for pyOpenSSL"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "types-pyOpenSSL-24.0.0.20240130.tar.gz", hash = "sha256:c812e5c1c35249f75ef5935708b2a997d62abf9745be222e5f94b9595472ab25"},
-    {file = "types_pyOpenSSL-24.0.0.20240130-py3-none-any.whl", hash = "sha256:24a255458b5b8a7fca8139cf56f2a8ad5a4f1a5f711b73a5bb9cb50dc688fab5"},
+    {file = "types-pyOpenSSL-24.0.0.20240311.tar.gz", hash = "sha256:7bca00cfc4e7ef9c5d2663c6a1c068c35798e59670595439f6296e7ba3d58083"},
+    {file = "types_pyOpenSSL-24.0.0.20240311-py3-none-any.whl", hash = "sha256:6e8e8bfad34924067333232c93f7fc4b369856d8bea0d5c9d1808cb290ab1972"},
 ]
 
 [package.dependencies]
@@ -7442,24 +7456,24 @@ cryptography = ">=35.0.0"
 
 [[package]]
 name = "types-python-dateutil"
-version = "2.8.19.20240106"
+version = "2.8.19.20240311"
 description = "Typing stubs for python-dateutil"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"},
-    {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"},
+    {file = "types-python-dateutil-2.8.19.20240311.tar.gz", hash = "sha256:51178227bbd4cbec35dc9adffbf59d832f20e09842d7dcb8c73b169b8780b7cb"},
+    {file = "types_python_dateutil-2.8.19.20240311-py3-none-any.whl", hash = "sha256:ef813da0809aca76472ca88807addbeea98b19339aebe56159ae2f4b4f70857a"},
 ]
 
 [[package]]
 name = "types-pyyaml"
-version = "6.0.12.12"
+version = "6.0.12.20240311"
 description = "Typing stubs for PyYAML"
 optional = false
-python-versions = "*"
+python-versions = ">=3.8"
 files = [
-    {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"},
-    {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"},
+    {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"},
+    {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"},
 ]
 
 [[package]]
@@ -7518,13 +7532,13 @@ files = [
 
 [[package]]
 name = "typing-extensions"
-version = "4.9.0"
+version = "4.10.0"
 description = "Backported and Experimental Type Hints for Python 3.8+"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
-    {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
+    {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
+    {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
 ]
 
 [[package]]
@@ -7617,13 +7631,13 @@ pydantic = ">=1.9.2,<2.0.0"
 
 [[package]]
 name = "virtualenv"
-version = "20.25.0"
+version = "20.25.1"
 description = "Virtual Python Environment builder"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"},
-    {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"},
+    {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"},
+    {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"},
 ]
 
 [package.dependencies]
diff --git a/llama-index-core/storage/default__vector_store.json b/llama-index-core/storage/default__vector_store.json
new file mode 100644
index 0000000000000000000000000000000000000000..b57c98cdf4e644fa07e689061d286160d9dd1c63
--- /dev/null
+++ b/llama-index-core/storage/default__vector_store.json
@@ -0,0 +1 @@
+{ "embedding_dict": {}, "text_id_to_ref_doc_id": {}, "metadata_dict": {} }
diff --git a/llama-index-core/storage/docstore.json b/llama-index-core/storage/docstore.json
new file mode 100644
index 0000000000000000000000000000000000000000..5776c1b7773b161533378ed9aa392f5ba26b610d
--- /dev/null
+++ b/llama-index-core/storage/docstore.json
@@ -0,0 +1,69 @@
+{
+  "docstore/data": {
+    "0ee51f2e-4f5a-4591-9428-359a0c203500": {
+      "__data__": {
+        "id_": "0ee51f2e-4f5a-4591-9428-359a0c203500",
+        "embedding": null,
+        "metadata": {},
+        "excluded_embed_metadata_keys": [],
+        "excluded_llm_metadata_keys": [],
+        "relationships": {},
+        "text": "a",
+        "start_char_idx": null,
+        "end_char_idx": null,
+        "text_template": "{metadata_str}\n\n{content}",
+        "metadata_template": "{key}: {value}",
+        "metadata_seperator": "\n",
+        "class_name": "TextNode"
+      },
+      "__type__": "1"
+    },
+    "470e55b7-6044-4336-9e36-2e6d814310f3": {
+      "__data__": {
+        "id_": "470e55b7-6044-4336-9e36-2e6d814310f3",
+        "embedding": null,
+        "metadata": {},
+        "excluded_embed_metadata_keys": [],
+        "excluded_llm_metadata_keys": [],
+        "relationships": {},
+        "text": "b",
+        "start_char_idx": null,
+        "end_char_idx": null,
+        "text_template": "{metadata_str}\n\n{content}",
+        "metadata_template": "{key}: {value}",
+        "metadata_seperator": "\n",
+        "class_name": "TextNode"
+      },
+      "__type__": "1"
+    },
+    "d00ee8c5-036a-44fe-a915-46609abd1438": {
+      "__data__": {
+        "id_": "d00ee8c5-036a-44fe-a915-46609abd1438",
+        "embedding": null,
+        "metadata": {},
+        "excluded_embed_metadata_keys": [],
+        "excluded_llm_metadata_keys": [],
+        "relationships": {},
+        "text": "c",
+        "start_char_idx": null,
+        "end_char_idx": null,
+        "text_template": "{metadata_str}\n\n{content}",
+        "metadata_template": "{key}: {value}",
+        "metadata_seperator": "\n",
+        "class_name": "TextNode"
+      },
+      "__type__": "1"
+    }
+  },
+  "docstore/metadata": {
+    "0ee51f2e-4f5a-4591-9428-359a0c203500": {
+      "doc_hash": "5f546eb4606b5c2b7d2a449a5cc2bbb477ed5a246c7051ce871b12f2dbfc8419"
+    },
+    "470e55b7-6044-4336-9e36-2e6d814310f3": {
+      "doc_hash": "ef2a875407aca2dd75a7a1dccc3b202918a9bc8e543e0b3def3f34ccd5e56a7b"
+    },
+    "d00ee8c5-036a-44fe-a915-46609abd1438": {
+      "doc_hash": "7ed0bc022e61394074858c49e5e2f8844146ae76fd02cc3b74ed8fb450e33b42"
+    }
+  }
+}
diff --git a/llama-index-core/storage/graph_store.json b/llama-index-core/storage/graph_store.json
new file mode 100644
index 0000000000000000000000000000000000000000..5ede88ebe0a6618690896173f746517c995048ee
--- /dev/null
+++ b/llama-index-core/storage/graph_store.json
@@ -0,0 +1 @@
+{ "graph_dict": {} }
diff --git a/llama-index-core/storage/image__vector_store.json b/llama-index-core/storage/image__vector_store.json
new file mode 100644
index 0000000000000000000000000000000000000000..b57c98cdf4e644fa07e689061d286160d9dd1c63
--- /dev/null
+++ b/llama-index-core/storage/image__vector_store.json
@@ -0,0 +1 @@
+{ "embedding_dict": {}, "text_id_to_ref_doc_id": {}, "metadata_dict": {} }
diff --git a/llama-index-core/storage/index_store.json b/llama-index-core/storage/index_store.json
new file mode 100644
index 0000000000000000000000000000000000000000..4e9a35061779f56279ad7644c7ea04e7329c8527
--- /dev/null
+++ b/llama-index-core/storage/index_store.json
@@ -0,0 +1,8 @@
+{
+  "index_store/data": {
+    "22ba1d5d-d5bd-4960-ae60-57ef30bc596b": {
+      "__type__": "list",
+      "__data__": "{\"index_id\": \"22ba1d5d-d5bd-4960-ae60-57ef30bc596b\", \"summary\": null, \"nodes\": [\"0ee51f2e-4f5a-4591-9428-359a0c203500\", \"470e55b7-6044-4336-9e36-2e6d814310f3\", \"d00ee8c5-036a-44fe-a915-46609abd1438\"]}"
+    }
+  }
+}
diff --git a/llama-index-core/storage/object_node_mapping.pickle b/llama-index-core/storage/object_node_mapping.pickle
new file mode 100644
index 0000000000000000000000000000000000000000..80304a8928214f8fe6050e62f10adbec70fdd17f
Binary files /dev/null and b/llama-index-core/storage/object_node_mapping.pickle differ
diff --git a/llama-index-core/tests/evaluation/test_batch_runner.py b/llama-index-core/tests/evaluation/test_batch_runner.py
index 0aead19a7576c4582f0554dc557edf2cca79d08c..7f2320d19b13b994da7d222ab5194818fe2a2a5a 100644
--- a/llama-index-core/tests/evaluation/test_batch_runner.py
+++ b/llama-index-core/tests/evaluation/test_batch_runner.py
@@ -6,6 +6,7 @@ from llama_index.core.evaluation.base import EvaluationResult
 from llama_index.core.prompts.mixin import PromptDictType
 
 from llama_index.core.evaluation.batch_runner import BatchEvalRunner
+import pytest
 
 
 class MockEvaluator(BaseEvaluator):
@@ -55,6 +56,7 @@ def get_eval_results(key, eval_results):
     return correct / len(results)
 
 
+@pytest.mark.asyncio()
 def test_batch_runner() -> None:
     # single evaluator
     runner = BatchEvalRunner(evaluators={"evaluator1": MockEvaluator()})
diff --git a/llama-index-integrations/agent/llama-index-agent-openai/tests/test_openai_agent.py b/llama-index-integrations/agent/llama-index-agent-openai/tests/test_openai_agent.py
deleted file mode 100644
index 1e860584c9e693b101ba1f80fc6aecb1dbbb4a02..0000000000000000000000000000000000000000
--- a/llama-index-integrations/agent/llama-index-agent-openai/tests/test_openai_agent.py
+++ /dev/null
@@ -1,341 +0,0 @@
-from typing import Any, AsyncGenerator, Generator, List, Sequence
-from unittest.mock import MagicMock, patch
-
-import pytest
-from llama_index.agent.openai.base import OpenAIAgent
-from llama_index.agent.openai.step import call_tool_with_error_handling
-from llama_index.core.base.llms.types import ChatMessage, ChatResponse
-from llama_index.core.chat_engine.types import (
-    AgentChatResponse,
-    StreamingAgentChatResponse,
-)
-from llama_index.core.llms.mock import MockLLM
-from llama_index.core.tools.function_tool import FunctionTool
-from llama_index.llms.openai import OpenAI
-
-from openai.types.chat.chat_completion import ChatCompletion, Choice
-from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice
-from openai.types.chat.chat_completion_chunk import ChatCompletionChunk, ChoiceDelta
-from openai.types.chat.chat_completion_message import ChatCompletionMessage
-
-
-def mock_chat_completion(*args: Any, **kwargs: Any) -> ChatCompletion:
-    if "functions" in kwargs:
-        if not kwargs["functions"]:
-            raise ValueError("functions must not be empty")
-
-    # Example taken from https://platform.openai.com/docs/api-reference/chat/create
-    return ChatCompletion(
-        id="chatcmpl-abc123",
-        object="chat.completion",
-        created=1677858242,
-        model="gpt-3.5-turbo-0301",
-        usage={"prompt_tokens": 13, "completion_tokens": 7, "total_tokens": 20},
-        choices=[
-            Choice(
-                message=ChatCompletionMessage(
-                    role="assistant", content="\n\nThis is a test!"
-                ),
-                finish_reason="stop",
-                index=0,
-                logprobs=None,
-            )
-        ],
-    )
-
-
-def mock_chat_stream(
-    *args: Any, **kwargs: Any
-) -> Generator[ChatCompletionChunk, None, None]:
-    if "functions" in kwargs:
-        if not kwargs["functions"]:
-            raise ValueError("functions must not be empty")
-
-    yield ChatCompletionChunk(
-        id="chatcmpl-abc123",
-        object="chat.completion.chunk",
-        created=1677858242,
-        model="gpt-3.5-turbo-0301",
-        choices=[
-            ChunkChoice(
-                delta=ChoiceDelta(role="assistant", content="\n\nThis is a test!"),
-                finish_reason="stop",
-                index=0,
-                logprobs=None,
-            )
-        ],
-    )
-
-
-async def mock_achat_completion(*args: Any, **kwargs: Any) -> ChatCompletion:
-    return mock_chat_completion(*args, **kwargs)
-
-
-async def mock_achat_stream(
-    *args: Any, **kwargs: Any
-) -> AsyncGenerator[ChatCompletionChunk, None]:
-    async def _mock_achat_stream(
-        *args: Any, **kwargs: Any
-    ) -> AsyncGenerator[ChatCompletionChunk, None]:
-        if "functions" in kwargs:
-            if not kwargs["functions"]:
-                raise ValueError("functions must not be empty")
-
-        yield ChatCompletionChunk(
-            id="chatcmpl-abc123",
-            object="chat.completion.chunk",
-            created=1677858242,
-            model="gpt-3.5-turbo-0301",
-            choices=[
-                ChunkChoice(
-                    delta=ChoiceDelta(role="assistant", content="\n\nThis is a test!"),
-                    finish_reason="stop",
-                    index=0,
-                    logprobs=None,
-                )
-            ],
-        )
-
-    return _mock_achat_stream(*args, **kwargs)
-
-
-@pytest.fixture()
-def add_tool() -> FunctionTool:
-    def add(a: int, b: int) -> int:
-        """Add two integers and returns the result integer."""
-        return a + b
-
-    return FunctionTool.from_defaults(fn=add)
-
-
-class MockChatLLM(MockLLM):
-    def __init__(self, responses: List[ChatMessage]) -> None:
-        self._i = 0  # call counter, determines which response to return
-        self._responses = responses  # list of responses to return
-
-    def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse:
-        del messages  # unused
-        response = ChatResponse(
-            message=self._responses[self._i],
-        )
-        self._i += 1
-        return response
-
-
-MOCK_ACTION_RESPONSE = """\
-Thought: I need to use a tool to help me answer the question.
-Action: add
-Action Input: {"a": 1, "b": 1}
-"""
-
-MOCK_FINAL_RESPONSE = """\
-Thought: I have enough information to answer the question without using any more tools.
-Answer: 2
-"""
-
-
-@patch("llama_index.llms.openai.base.SyncOpenAI")
-def test_chat_basic(MockSyncOpenAI: MagicMock, add_tool: FunctionTool) -> None:
-    mock_instance = MockSyncOpenAI.return_value
-    mock_instance.chat.completions.create.return_value = mock_chat_completion()
-
-    llm = OpenAI(model="gpt-3.5-turbo")
-
-    agent = OpenAIAgent.from_tools(
-        tools=[add_tool],
-        llm=llm,
-    )
-    response = agent.chat("What is 1 + 1?")
-    assert isinstance(response, AgentChatResponse)
-    assert response.response == "\n\nThis is a test!"
-    assert len(agent.chat_history) == 2
-    assert agent.chat_history[0].content == "What is 1 + 1?"
-    assert agent.chat_history[1].content == "\n\nThis is a test!"
-
-
-@patch("llama_index.llms.openai.base.AsyncOpenAI")
-@pytest.mark.asyncio()
-async def test_achat_basic(MockAsyncOpenAI: MagicMock, add_tool: FunctionTool) -> None:
-    mock_instance = MockAsyncOpenAI.return_value
-    mock_instance.chat.completions.create.return_value = mock_achat_completion()
-
-    llm = OpenAI(model="gpt-3.5-turbo")
-
-    agent = OpenAIAgent.from_tools(
-        tools=[add_tool],
-        llm=llm,
-    )
-    response = await agent.achat("What is 1 + 1?")
-    assert isinstance(response, AgentChatResponse)
-    assert response.response == "\n\nThis is a test!"
-    assert len(agent.chat_history) == 2
-    assert agent.chat_history[0].content == "What is 1 + 1?"
-    assert agent.chat_history[1].content == "\n\nThis is a test!"
-
-
-@patch("llama_index.llms.openai.base.SyncOpenAI")
-def test_stream_chat_basic(MockSyncOpenAI: MagicMock, add_tool: FunctionTool) -> None:
-    mock_instance = MockSyncOpenAI.return_value
-    mock_instance.chat.completions.create.side_effect = mock_chat_stream
-
-    llm = OpenAI(model="gpt-3.5-turbo")
-
-    agent = OpenAIAgent.from_tools(
-        tools=[add_tool],
-        llm=llm,
-    )
-    response = agent.stream_chat("What is 1 + 1?")
-    assert isinstance(response, StreamingAgentChatResponse)
-    # str() strips newline values
-    assert str(response) == "This is a test!"
-    assert len(agent.chat_history) == 2
-    assert agent.chat_history[0].content == "What is 1 + 1?"
-    assert agent.chat_history[1].content == "This is a test!"
-
-
-@patch("llama_index.llms.openai.base.AsyncOpenAI")
-@pytest.mark.asyncio()
-async def test_astream_chat_basic(
-    MockAsyncOpenAI: MagicMock, add_tool: FunctionTool
-) -> None:
-    mock_instance = MockAsyncOpenAI.return_value
-    mock_instance.chat.completions.create.side_effect = mock_achat_stream
-
-    llm = OpenAI(model="gpt-3.5-turbo")
-
-    agent = OpenAIAgent.from_tools(
-        tools=[add_tool],
-        llm=llm,
-    )
-    response_stream = await agent.astream_chat("What is 1 + 1?")
-    async for response in response_stream.async_response_gen():
-        pass
-    assert isinstance(response_stream, StreamingAgentChatResponse)
-    # str() strips newline values
-    assert response == "\n\nThis is a test!"
-    assert len(agent.chat_history) == 2
-    assert agent.chat_history[0].content == "What is 1 + 1?"
-    assert agent.chat_history[1].content == "This is a test!"
-
-
-@patch("llama_index.llms.openai.base.SyncOpenAI")
-def test_chat_no_functions(MockSyncOpenAI: MagicMock) -> None:
-    mock_instance = MockSyncOpenAI.return_value
-    mock_instance.chat.completions.create.return_value = mock_chat_completion()
-
-    llm = OpenAI(model="gpt-3.5-turbo")
-
-    agent = OpenAIAgent.from_tools(
-        llm=llm,
-    )
-    response = agent.chat("What is 1 + 1?")
-    assert isinstance(response, AgentChatResponse)
-    assert response.response == "\n\nThis is a test!"
-
-
-def test_call_tool_with_error_handling() -> None:
-    """Test call tool with error handling."""
-
-    def _add(a: int, b: int) -> int:
-        return a + b
-
-    tool = FunctionTool.from_defaults(fn=_add)
-
-    output = call_tool_with_error_handling(
-        tool, {"a": 1, "b": 1}, error_message="Error!"
-    )
-    assert output.content == "2"
-
-    # try error
-    output = call_tool_with_error_handling(
-        tool, {"a": "1", "b": 1}, error_message="Error!"
-    )
-    assert output.content == "Error!"
-
-
-@patch("llama_index.llms.openai.base.SyncOpenAI")
-def test_add_step(
-    MockSyncOpenAI: MagicMock,
-    add_tool: FunctionTool,
-) -> None:
-    """Test add step."""
-    mock_instance = MockSyncOpenAI.return_value
-    mock_instance.chat.completions.create.return_value = mock_chat_completion()
-
-    llm = OpenAI(model="gpt-3.5-turbo")
-    # sync
-    agent = OpenAIAgent.from_tools(
-        tools=[add_tool],
-        llm=llm,
-    )
-    ## NOTE: can only take a single step before finishing,
-    # since mocked chat output does not call any tools
-    task = agent.create_task("What is 1 + 1?")
-    step_output = agent.run_step(task.task_id)
-    assert str(step_output) == "\n\nThis is a test!"
-
-    # add human input (not used but should be in memory)
-    task = agent.create_task("What is 1 + 1?")
-    step_output = agent.run_step(task.task_id, input="tmp")
-    chat_history: List[ChatMessage] = task.extra_state["new_memory"].get_all()
-    assert "tmp" in [m.content for m in chat_history]
-
-    # # stream_step
-    # agent = OpenAIAgent.from_tools(
-    #     tools=[add_tool],
-    #     llm=llm,
-    # )
-    # task = agent.create_task("What is 1 + 1?")
-    # # first step
-    # step_output = agent.stream_step(task.task_id)
-    # # add human input (not used but should be in memory)
-    # step_output = agent.stream_step(task.task_id, input="tmp")
-    # chat_history: List[ChatMessage] = task.extra_state["new_memory"].get_all()
-    # assert "tmp" in [m.content for m in chat_history]
-
-
-@patch("llama_index.llms.openai.base.AsyncOpenAI")
-@pytest.mark.asyncio()
-async def test_async_add_step(
-    MockAsyncOpenAI: MagicMock,
-    add_tool: FunctionTool,
-) -> None:
-    mock_instance = MockAsyncOpenAI.return_value
-
-    llm = OpenAI(model="gpt-3.5-turbo")
-    # async
-    agent = OpenAIAgent.from_tools(
-        tools=[add_tool],
-        llm=llm,
-    )
-    task = agent.create_task("What is 1 + 1?")
-    # first step
-    mock_instance.chat.completions.create.return_value = mock_achat_completion()
-    step_output = await agent.arun_step(task.task_id)
-    # add human input (not used but should be in memory)
-    task = agent.create_task("What is 1 + 1?")
-    mock_instance.chat.completions.create.return_value = mock_achat_completion()
-    step_output = await agent.arun_step(task.task_id, input="tmp")
-    chat_history: List[ChatMessage] = task.extra_state["new_memory"].get_all()
-    assert "tmp" in [m.content for m in chat_history]
-
-    # async stream step
-    agent = OpenAIAgent.from_tools(
-        tools=[add_tool],
-        llm=llm,
-    )
-    task = agent.create_task("What is 1 + 1?")
-    # first step
-    mock_instance.chat.completions.create.side_effect = mock_achat_stream
-    step_output = await agent.astream_step(task.task_id)
-    # add human input (not used but should be in memory)
-    task = agent.create_task("What is 1 + 1?")
-    mock_instance.chat.completions.create.side_effect = mock_achat_stream
-
-    # stream the output to ensure it gets written to memory
-    step_output = await agent.astream_step(task.task_id, input="tmp")
-    async for _ in step_output.output.async_response_gen():
-        pass
-
-    chat_history = task.memory.get_all()
-    assert "tmp" in [m.content for m in chat_history]
diff --git a/llama-index-networks/llama_index/networks/network/query_engine.py b/llama-index-networks/llama_index/networks/network/query_engine.py
index bdfcd562cd0a17c810aa753bc05b233298308fb6..898a17f4a1a5415e91644486f87c317c78474849 100644
--- a/llama-index-networks/llama_index/networks/network/query_engine.py
+++ b/llama-index-networks/llama_index/networks/network/query_engine.py
@@ -5,7 +5,7 @@ from llama_index.core.base.base_query_engine import BaseQueryEngine
 from llama_index.core.callbacks.base import CallbackManager
 from llama_index.core.callbacks.schema import CBEventType, EventPayload
 from llama_index.core.base.response.schema import RESPONSE_TYPE
-from llama_index.core.schema import TextNode, NodeWithScore
+from llama_index.core.schema import TextNode, NodeWithScore, QueryBundle
 from llama_index.core.response_synthesizers import (
     BaseSynthesizer,
     ResponseMode,
@@ -78,14 +78,14 @@ class NetworkQueryEngine(BaseQueryEngine):
         """Get prompt sub-modules."""
         return {"response_synthesizer": self._response_synthesizer}
 
-    def _query(self, query: str) -> RESPONSE_TYPE:
+    def _query(self, query_bundle: QueryBundle) -> RESPONSE_TYPE:
         """Answer a query."""
         with self.callback_manager.event(
-            CBEventType.QUERY, payload={EventPayload.QUERY_STR: query}
+            CBEventType.QUERY, payload={EventPayload.QUERY_STR: query_bundle.query_str}
         ) as query_event:
             results = []
             async_tasks = [
-                contributor.aquery(query) for contributor in self._contributors
+                contributor.aquery(query_bundle) for contributor in self._contributors
             ]
             results = run_async_tasks(async_tasks)
 
@@ -96,7 +96,7 @@ class NetworkQueryEngine(BaseQueryEngine):
                 for el in results
             ]
             response = self._response_synthesizer.synthesize(
-                query=query,
+                query=query_bundle,
                 nodes=nodes,
             )
 
@@ -104,15 +104,15 @@ class NetworkQueryEngine(BaseQueryEngine):
 
         return response
 
-    async def _aquery(self, query: str) -> RESPONSE_TYPE:
+    async def _aquery(self, query_bundle: QueryBundle) -> RESPONSE_TYPE:
         """Answer a query."""
         with self.callback_manager.event(
-            CBEventType.QUERY, payload={EventPayload.QUERY_STR: query}
+            CBEventType.QUERY, payload={EventPayload.QUERY_STR: query_bundle.query_str}
         ) as query_event:
             # go through all clients
             query_tasks = []
             for contributor in self._contributors:
-                query_tasks += [contributor.aquery(query)]
+                query_tasks += [contributor.aquery(query_bundle)]
 
             results = await asyncio.gather(*query_tasks)
             nodes = [
@@ -122,7 +122,7 @@ class NetworkQueryEngine(BaseQueryEngine):
                 for el in results
             ]
             response = await self._response_synthesizer.asynthesize(
-                query=query,
+                query=query_bundle,
                 nodes=nodes,
             )
 
diff --git a/llama-index-networks/tests/contributor/retriever/test_retriever_service.py b/llama-index-networks/tests/contributor/retriever/test_retriever_service.py
index 82c0cfa22ee39038f9a4d11a6e8fe60e15ae00e7..f8ce13e68bcd14f3cf52f8ba0a4cb1aa7119c0e9 100644
--- a/llama-index-networks/tests/contributor/retriever/test_retriever_service.py
+++ b/llama-index-networks/tests/contributor/retriever/test_retriever_service.py
@@ -5,19 +5,27 @@ from llama_index.networks.contributor.retriever import (
     ContributorRetrieverServiceSettings,
 )
 from llama_index.core.base.base_retriever import BaseRetriever
-from llama_index.core.schema import NodeWithScore, TextNode
+from llama_index.core.schema import NodeWithScore, TextNode, QueryBundle
 
 
 class MockRetriever(BaseRetriever):
     """Custom retriever for testing."""
 
-    def _retrieve(self, query_str: str) -> List[NodeWithScore]:
+    def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
         """Mock retrieval."""
-        return [NodeWithScore(node=TextNode(text=f"mock_{query_str}"), score=0.9)]
+        return [
+            NodeWithScore(
+                node=TextNode(text=f"mock_{query_bundle.query_str}"), score=0.9
+            )
+        ]
 
-    async def _aretrieve(self, query_str: str) -> List[NodeWithScore]:
+    async def _aretrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
         """Mock retrieval."""
-        return [NodeWithScore(node=TextNode(text=f"mock_{query_str}"), score=0.9)]
+        return [
+            NodeWithScore(
+                node=TextNode(text=f"mock_{query_bundle.query_str}"), score=0.9
+            )
+        ]
 
 
 def test_contributor_service_index():
diff --git a/llama-index-packs/llama-index-packs-koda-retriever/llama_index/packs/koda_retriever/base.py b/llama-index-packs/llama-index-packs-koda-retriever/llama_index/packs/koda_retriever/base.py
index 9c03d93d675f5647b0d460df02c241c3a26aea9d..76b71da49432b50708f7ce97161e40861e7e044c 100644
--- a/llama-index-packs/llama-index-packs-koda-retriever/llama_index/packs/koda_retriever/base.py
+++ b/llama-index-packs/llama-index-packs-koda-retriever/llama_index/packs/koda_retriever/base.py
@@ -15,7 +15,7 @@ from llama_index.core.retrievers import VectorIndexRetriever
 from llama_index.core import VectorStoreIndex
 from llama_index.core.postprocessor.types import BaseNodePostprocessor
 from llama_index.core.llms import LLM
-from llama_index.core.schema import NodeWithScore, QueryType
+from llama_index.core.schema import NodeWithScore, QueryType, QueryBundle
 from llama_index.core.llama_pack.base import BaseLlamaPack
 from llama_index.core.base.response.schema import RESPONSE_TYPE
 from .constants import CATEGORIZER_PROMPT, DEFAULT_CATEGORIES
@@ -175,16 +175,16 @@ class KodaRetriever(BaseRetriever):
 
         return await self.retriever.aretrieve(str_or_query_bundle=query)
 
-    def _retrieve(self, query: QueryType) -> List[NodeWithScore]:
+    def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
         """llama-index compatible retrieve method that auto-determines the optimal alpha for a query and then retrieves results for a query."""
         if not self.llm:
             warning = f"LLM is not provided, skipping route categorization. Default alpha of {self.default_alpha} will be used."
             logging.warning(warning)
 
-            results = self.retriever.retrieve(query)
+            results = self.retriever.retrieve(query_bundle)
         else:
-            category = self.categorize(query)  # type: ignore
-            results = self.category_retrieve(category, query)
+            category = self.categorize(query=query_bundle.query_str)  # type: ignore
+            results = self.category_retrieve(category, query_bundle)
             if self.verbose:
                 logging.info(
                     f"Query categorized as {category} with alpha of {self.matrix.get_alpha(category)}"
@@ -193,20 +193,22 @@ class KodaRetriever(BaseRetriever):
         if self.reranker:
             if self.verbose:
                 logging.info("Reranking results")
-            results = self.reranker.postprocess_nodes(nodes=results, query_str=query)
+            results = self.reranker.postprocess_nodes(
+                nodes=results, query_bundle=query_bundle
+            )
 
         return results
 
-    async def _aretrieve(self, query: QueryType) -> List[NodeWithScore]:
+    async def _aretrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
         """(async) llama-index compatible retrieve method that auto-determines the optimal alpha for a query and then retrieves results for a query."""
         if not self.llm:
             warning = f"LLM is not provided, skipping route categorization. Default alpha of {self.default_alpha} will be used."
             logging.warning(warning)
 
-            results = await self.retriever.aretrieve(query)
+            results = await self.retriever.aretrieve(query_bundle)
         else:
-            category = await self.a_categorize(query)  # type: ignore
-            results = await self.a_category_retrieve(category, query)
+            category = await self.a_categorize(query_bundle.query_str)  # type: ignore
+            results = await self.a_category_retrieve(category, query_bundle)
             if self.verbose:
                 logging.info(
                     f"Query categorized as {category} with alpha of {self.matrix.get_alpha(category)}"
@@ -215,7 +217,9 @@ class KodaRetriever(BaseRetriever):
         if self.reranker:
             if self.verbose:
                 logging.info("Reranking results")
-            results = self.reranker.postprocess_nodes(nodes=results, query_str=query)
+            results = self.reranker.postprocess_nodes(
+                nodes=results, query_bundle=query_bundle
+            )
 
         return results
 
diff --git a/llama-index-packs/llama-index-packs-koda-retriever/pyproject.toml b/llama-index-packs/llama-index-packs-koda-retriever/pyproject.toml
index 0ee676967206e84c7b43f10490432c15b23edd5f..76c4de63b9851f1226781e6f6d172f7ae5210cd0 100644
--- a/llama-index-packs/llama-index-packs-koda-retriever/pyproject.toml
+++ b/llama-index-packs/llama-index-packs-koda-retriever/pyproject.toml
@@ -30,7 +30,7 @@ license = "MIT"
 name = "llama-index-packs-koda-retriever"
 packages = [{include = "llama_index/"}]
 readme = "README.md"
-version = "0.1.0"
+version = "0.1.1"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<3.12"