From bad3afa8ba5d43b4b2f8d81d8e04bf7d6f52f9b0 Mon Sep 17 00:00:00 2001
From: liuyhwangyh <liuyhwangyh@163.com>
Date: Tue, 20 Feb 2024 10:28:32 +0800
Subject: [PATCH] upgrade dashscope to new architecture (#10966)

---
 .../embeddings/dashscope_embeddings.ipynb     |  14 +-
 docs/examples/llm/dashscope.ipynb             | 491 ++++++++++++++++++
 .../multi_modal/dashscope_multi_modal.ipynb   | 315 +++++++++++
 .../.gitignore                                | 153 ++++++
 .../llama-index-embeddings-dashscope/BUILD    |   3 +
 .../llama-index-embeddings-dashscope/Makefile |  17 +
 .../README.md                                 |   1 +
 .../llama_index/embeddings/dashscope/BUILD    |   1 +
 .../embeddings/dashscope/__init__.py          |  16 +
 .../llama_index/embeddings/dashscope/base.py  | 307 +++++++++++
 .../pyproject.toml                            |  55 ++
 .../tests/BUILD                               |   1 +
 .../tests/__init__.py                         |   0
 .../tests/test_embeddings_dashscope.py        |   7 +
 14 files changed, 1374 insertions(+), 7 deletions(-)
 create mode 100644 docs/examples/llm/dashscope.ipynb
 create mode 100644 docs/examples/multi_modal/dashscope_multi_modal.ipynb
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/.gitignore
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/BUILD
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/Makefile
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/README.md
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/BUILD
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/__init__.py
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/base.py
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/pyproject.toml
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/BUILD
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/__init__.py
 create mode 100644 llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/test_embeddings_dashscope.py

diff --git a/docs/examples/embeddings/dashscope_embeddings.ipynb b/docs/examples/embeddings/dashscope_embeddings.ipynb
index 658ceeb336..69a90789a4 100644
--- a/docs/examples/embeddings/dashscope_embeddings.ipynb
+++ b/docs/examples/embeddings/dashscope_embeddings.ipynb
@@ -29,8 +29,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "%pip install llama-index\n",
-    "%pip install -U dashscope"
+    "%pip install llama-index-core\n",
+    "%pip install llama-index-embeddings-dashscope"
    ]
   },
   {
@@ -61,7 +61,7 @@
    ],
    "source": [
     "# imports\n",
-    "from llama_index.core.embeddings import (\n",
+    "from llama_index.embeddings.dashscope import (\n",
     "    DashScopeEmbedding,\n",
     "    DashScopeTextEmbeddingModels,\n",
     "    DashScopeTextEmbeddingType,\n",
@@ -95,7 +95,7 @@
    ],
    "source": [
     "# imports\n",
-    "from llama_index.core.embeddings import (\n",
+    "from llama_index.embeddings.dashscope import (\n",
     "    DashScopeEmbedding,\n",
     "    DashScopeTextEmbeddingModels,\n",
     "    DashScopeTextEmbeddingType,\n",
@@ -128,7 +128,7 @@
    ],
    "source": [
     "# call batch text embedding\n",
-    "from llama_index.core.embeddings import (\n",
+    "from llama_index.embeddings.dashscope import (\n",
     "    DashScopeEmbedding,\n",
     "    DashScopeBatchTextEmbeddingModels,\n",
     "    DashScopeTextEmbeddingType,\n",
@@ -161,7 +161,7 @@
    ],
    "source": [
     "# call multimodal embedding service\n",
-    "from llama_index.core.embeddings import (\n",
+    "from llama_index.embeddings.dashscope import (\n",
     "    DashScopeEmbedding,\n",
     "    DashScopeMultiModalEmbeddingModels,\n",
     ")\n",
@@ -193,7 +193,7 @@
    ],
    "source": [
     "# call multimodal embedding service\n",
-    "from llama_index.core.embeddings import (\n",
+    "from llama_index.embeddings.dashscope import (\n",
     "    DashScopeEmbedding,\n",
     "    DashScopeMultiModalEmbeddingModels,\n",
     ")\n",
diff --git a/docs/examples/llm/dashscope.ipynb b/docs/examples/llm/dashscope.ipynb
new file mode 100644
index 0000000000..55afc8e6c0
--- /dev/null
+++ b/docs/examples/llm/dashscope.ipynb
@@ -0,0 +1,491 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "<a href=\"https://colab.research.google.com/github/run-llama/llama_index/blob/main/docs/examples/llm/dashscope.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# DashScope LLMS"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "In this notebook, we show how to use the DashScope LLM models in LlamaIndex. Check out the [DashScope site](https://dashscope.aliyun.com/) or the [documents](https://help.aliyun.com/zh/dashscope/developer-reference/api-details).\n",
+    "\n",
+    "If you're opening this Notebook on colab, you will need to install LlamaIndex 🦙 and the DashScope Python SDK."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "!pip install llama-index-llms-dashscope"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Basic Usage\n",
+    "\n",
+    "You will need to login [DashScope](https://dashscope.aliyun.com/) an create a API. Once you have one, you can either pass it explicitly to the API, or use the `DASHSCOPE_API_KEY` environment variable."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "env: DASHSCOPE_API_KEY=YOUR_DASHSCOPE_API_KEY\n"
+     ]
+    }
+   ],
+   "source": [
+    "%env DASHSCOPE_API_KEY=YOUR_DASHSCOPE_API_KEY"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "\n",
+    "os.environ[\"DASHSCOPE_API_KEY\"] = \"YOUR_DASHSCOPE_API_KEY\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Initialize `DashScope` Object"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from llama_index.llms.dashscope import DashScope, DashScopeGenerationModels\n",
+    "\n",
+    "dashscope_llm = DashScope(model_name=DashScopeGenerationModels.QWEN_MAX)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Call `complete` with a prompt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Making a basic vanilla cake from scratch is a simple and enjoyable process. Here's a step-by-step recipe for a classic vanilla sponge cake:\n",
+      "\n",
+      "**Ingredients:**\n",
+      "- 2 cups (240g) all-purpose flour\n",
+      "- 1 ¾ cups (350g) granulated sugar\n",
+      "- 3 teaspoons baking powder\n",
+      "- ½ teaspoon salt\n",
+      "- â…“ cup (80g) unsalted butter, softened\n",
+      "- 1 cup (240ml) whole milk, at room temperature\n",
+      "- 2 teaspoons pure vanilla extract\n",
+      "- 3 large eggs, at room temperature\n",
+      "\n",
+      "**Instructions:**\n",
+      "\n",
+      "1. **Preheat the Oven**: Preheat your oven to 350°F (175°C). Grease two 9-inch round cake pans with butter or cooking spray, then line the bottoms with parchment paper.\n",
+      "\n",
+      "2. **Dry Ingredients**: In a large mixing bowl, sift together the flour, sugar, baking powder, and salt. Make sure everything is well combined.\n",
+      "\n",
+      "3. **Cream the Butter and Sugar**: In another large bowl, beat the softened butter until creamy. Gradually add the sugar and continue beating until light and fluffy, about 3-4 minutes.\n",
+      "\n",
+      "4. **Add Eggs and Vanilla**: Beat in the eggs one at a time, making sure each egg is fully incorporated before adding the next. Mix in the vanilla extract.\n",
+      "\n",
+      "5. **Combine Wet and Dry**: With the mixer on low speed, alternately add the dry ingredients mixture and milk into the wet mixture in three parts, starting and ending with the flour mixture. Be careful not to overmix; stop as soon as the ingredients are just combined.\n",
+      "\n",
+      "6. **Pour and Bake**: Divide the batter evenly between the prepared pans. Smooth the tops with a spatula. Bake for 25-30 minutes, or until a toothpick inserted into the center of each cake comes out clean.\n",
+      "\n",
+      "7. **Cool the Cake**: Remove cakes from the oven and let them cool in their pans for about 10 minutes. Then, remove from the pans and transfer onto wire racks to cool completely.\n",
+      "\n",
+      "8. **Frosting and Assembly**: Once cooled, you can frost and assemble the cake with your favorite frosting, such as buttercream. Place one cake layer on a plate or cake stand, spread frosting over the top, then place the second cake layer on top. Frost the top and sides of the cake as desired.\n",
+      "\n",
+      "Remember, this is a basic vanilla cake recipe that can be customized with different flavors or toppings. Always read through the entire recipe before beginning and ensure all ingredients are at room temperature for best results.\n"
+     ]
+    }
+   ],
+   "source": [
+    "resp = dashscope_llm.complete(\"How to make cake?\")\n",
+    "print(resp)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Call `stream_complete`` with a prompt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Baking a cake can be a fun and rewarding experience! Here's a simple recipe for a classic vanilla sponge cake:\n",
+      "\n",
+      "**Ingredients:**\n",
+      "- 2 cups (240g) all-purpose flour\n",
+      "- 2 teaspoons baking powder\n",
+      "- 1/2 teaspoon salt\n",
+      "- 1 cup (2 sticks or 226g) unsalted butter, at room temperature\n",
+      "- 1 3/4 cups (350g) granulated sugar\n",
+      "- 4 large eggs, at room temperature\n",
+      "- 2 teaspoons pure vanilla extract\n",
+      "- 1 1/4 cups (300ml) whole milk, at room temperature\n",
+      "\n",
+      "**Instructions:**\n",
+      "\n",
+      "1. **Preheat the Oven**: Preheat your oven to 350°F (175°C). Grease two 9-inch (23cm) round cake pans with butter or cooking spray, then line the bottoms with parchment paper.\n",
+      "\n",
+      "2. **Mix Dry Ingredients**: In a medium bowl, sift together the flour, baking powder, and salt. Set aside.\n",
+      "\n",
+      "3. **Cream Butter and Sugar**: In a large mixing bowl or using a stand mixer, cream the butter until it is light and fluffy. Gradually add in the sugar and continue beating until the mixture is pale and well-combined, about 3-5 minutes.\n",
+      "\n",
+      "4. **Add Eggs and Vanilla**: Beat in the eggs one at a time, making sure each egg is fully incorporated before adding the next. Add the vanilla extract and mix well.\n",
+      "\n",
+      "5. **Combine Wet and Dry**: With the mixer on low speed, alternately add the dry ingredients mixture and the milk to the butter-sugar mixture, starting and ending with the dry ingredients. Mix just until combined; do not overmix.\n",
+      "\n",
+      "6. **Pour and Bake**: Divide the batter evenly between the prepared pans. Smooth the tops with a spatula. Bake for 25-30 minutes or until a toothpick inserted into the center of the cakes comes out clean.\n",
+      "\n",
+      "7. **Cool and Frost**: Remove the cakes from the oven and let them cool in their pans for about 10 minutes. Then, remove the cakes from the pans and transfer to a wire rack to cool completely. Once cooled, you can frost and decorate as desired.\n",
+      "\n",
+      "For frosting, you could make a basic buttercream by beating softened butter with powdered sugar, vanilla, and a little milk until smooth and spreadable.\n",
+      "\n",
+      "Remember that every oven behaves differently, so keep an eye on your cake while it bakes. Enjoy your homemade cake!\n",
+      "\n",
+      "**Optional Steps:**\n",
+      "- For extra flavor, you can add lemon zest, cocoa powder, or other flavorings to the batter.\n",
+      "- Level the cakes with a serrated knife before stacking and frosting to ensure an even surface.\n",
+      "- Chill the cakes before frosting to help prevent crumbs from getting mixed into the frosting."
+     ]
+    }
+   ],
+   "source": [
+    "responses = dashscope_llm.stream_complete(\"How to make cake?\")\n",
+    "for response in responses:\n",
+    "    print(response.delta, end=\"\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Call `chat` with a list of messages"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "assistant: Baking a cake from scratch is a fun and rewarding experience! Here's a simple recipe for a classic vanilla cake:\n",
+      "\n",
+      "**Ingredients:**\n",
+      "- 2 and 1/4 cups (280g) all-purpose flour\n",
+      "- 1 and 1/2 cups (300g) granulated sugar\n",
+      "- 3 teaspoons baking powder\n",
+      "- 1/2 teaspoon salt\n",
+      "- 1 cup (2 sticks or 226g) unsalted butter, at room temperature\n",
+      "- 1 cup (240ml) whole milk, at room temperature\n",
+      "- 4 large eggs, at room temperature\n",
+      "- 2 teaspoons pure vanilla extract\n",
+      "\n",
+      "**For the frosting (optional):**\n",
+      "- 1 cup (2 sticks or 226g) unsalted butter, softened\n",
+      "- 4 cups (480g) powdered sugar\n",
+      "- 2 teaspoons vanilla extract\n",
+      "- 3-5 tablespoons milk\n",
+      "\n",
+      "**Instructions:**\n",
+      "\n",
+      "1. **Preheat Oven and Prepare Pan**: Preheat your oven to 350°F (175°C). Grease two 9-inch round cake pans with butter or cooking spray. Dust them lightly with flour and tap out any excess.\n",
+      "\n",
+      "2. **Mix Dry Ingredients**: In a large mixing bowl, whisk together the flour, sugar, baking powder, and salt until well combined.\n",
+      "\n",
+      "3. **Cream Butter and Sugar**: In another large bowl, using an electric mixer on medium speed, beat the butter until creamy. Gradually add in the sugar and continue beating until light and fluffy, about 3-5 minutes.\n",
+      "\n",
+      "4. **Add Eggs and Vanilla**: Beat in the eggs one at a time, making sure each egg is fully incorporated before adding the next. Stir in the vanilla extract.\n",
+      "\n",
+      "5. **Alternate Wet and Dry Ingredients**: With the mixer on low speed, alternately add the dry ingredients mixture and milk to the wet mixture, starting and ending with the dry ingredients. Mix until just combined; do not overmix.\n",
+      "\n",
+      "6. **Pour Batter and Bake**: Divide the batter evenly between the prepared pans. Smooth the tops. Bake for 25-30 minutes or until a toothpick inserted into the center of the cakes comes out clean.\n",
+      "\n",
+      "7. **Cool Cakes**: Remove cakes from the oven and let them cool in the pans for about 10 minutes. Then, carefully invert the cakes onto wire racks to cool completely.\n",
+      "\n",
+      "8. **Make Frosting (Optional)**: In a separate bowl, beat butter until smooth. Gradually add powdered sugar, vanilla extract, and enough milk to reach your desired consistency. Beat until smooth and creamy.\n",
+      "\n",
+      "9. **Assemble Cake**: Once the cakes are cooled, place one layer on a plate or cake stand. Spread a thick layer of frosting on top. Place the second cake layer on top and cover the entire cake with the remaining frosting.\n",
+      "\n",
+      "Enjoy your homemade vanilla cake!\n",
+      "\n",
+      "Remember, this is just a basic recipe. You can customize it by adding food coloring, different extracts, or mix-ins like chocolate chips or fruit. Always be mindful of adjusting baking times if you're using different sized pans.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from llama_index.core.base.llms.types import MessageRole, ChatMessage\n",
+    "\n",
+    "messages = [\n",
+    "    ChatMessage(\n",
+    "        role=MessageRole.SYSTEM, content=\"You are a helpful assistant.\"\n",
+    "    ),\n",
+    "    ChatMessage(role=MessageRole.USER, content=\"How to make cake?\"),\n",
+    "]\n",
+    "resp = dashscope_llm.chat(messages)\n",
+    "print(resp)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Using `stream_chat` "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Baking a cake from scratch is a fun and delicious activity! Here's a simple recipe for a classic vanilla cake:\n",
+      "\n",
+      "**Ingredients:**\n",
+      "- 2 cups (240g) all-purpose flour\n",
+      "- 2 teaspoons baking powder\n",
+      "- 1/2 teaspoon baking soda\n",
+      "- 1/2 teaspoon salt\n",
+      "- 1 cup (2 sticks or 226g) unsalted butter, at room temperature\n",
+      "- 1 3/4 cups (350g) granulated sugar\n",
+      "- 4 large eggs, at room temperature\n",
+      "- 2 teaspoons pure vanilla extract\n",
+      "- 1 cup (240ml) whole milk, at room temperature\n",
+      "\n",
+      "**Instructions:**\n",
+      "\n",
+      "1. **Preheat the Oven**: Preheat your oven to 350°F (175°C). Grease two 9-inch round cake pans with butter or cooking spray and line the bottoms with parchment paper.\n",
+      "\n",
+      "2. **Dry Ingredients Mix**: In a medium bowl, sift together the flour, baking powder, baking soda, and salt. Set aside.\n",
+      "\n",
+      "3. **Creaming Butter and Sugar**: In a large mixing bowl or using a stand mixer, beat the softened butter until creamy. Gradually add in the sugar and continue beating until light and fluffy, about 3-5 minutes.\n",
+      "\n",
+      "4. **Egg Incorporation**: Add the eggs one at a time, beating well after each addition. Stir in the vanilla extract.\n",
+      "\n",
+      "5. **Dry & Wet Ingredients Combine**: Add the dry ingredients mixture to the wet ingredients in three parts, alternating with the milk, beginning and ending with the dry ingredients. Mix on low speed just until combined. Do not overmix; a few lumps are okay.\n",
+      "\n",
+      "6. **Bake**: Divide the batter evenly between the prepared pans. Smooth out the tops with a spatula. Bake for 25-30 minutes or until a toothpick inserted into the center of the cakes comes out clean.\n",
+      "\n",
+      "7. **Cool**: Remove the cakes from the oven and let them cool in their pans for about 10 minutes. Then, invert onto wire racks to cool completely.\n",
+      "\n",
+      "8. **Frosting and Assembly**: Once cooled, you can frost the cake with your choice of frosting - such as a classic buttercream. Place one cake layer on a plate or cake stand, spread frosting over it, then place the second layer on top. Frost the top and sides of the cake as desired.\n",
+      "\n",
+      "Enjoy your homemade vanilla cake!\n",
+      "\n",
+      "**Note:** You can customize this basic recipe by adding food coloring, different flavor extracts, or incorporating mix-ins like chocolate chips or fruit. Adjust baking times if you're making cupcakes or larger cakes."
+     ]
+    }
+   ],
+   "source": [
+    "responses = dashscope_llm.stream_chat(messages)\n",
+    "for response in responses:\n",
+    "    print(response.delta, end=\"\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Multiple rounds conversation."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "assistant: Baking a cake is a fun and creative process! Here's a simple recipe for a classic vanilla sponge cake. You will need:\n",
+      "\n",
+      "Ingredients:\n",
+      "- 1 and 1/2 cups (190g) all-purpose flour\n",
+      "- 1 cup (200g) granulated sugar\n",
+      "- 2 teaspoons baking powder\n",
+      "- 1/2 teaspoon salt\n",
+      "- 1/2 cup (1 stick or 113g) unsalted butter, softened\n",
+      "- 2 large eggs, room temperature\n",
+      "- 2 teaspoons pure vanilla extract\n",
+      "- 3/4 cup (180ml) whole milk, at room temperature\n",
+      "\n",
+      "For the frosting (optional):\n",
+      "- 1 cup (226g) unsalted butter, softened\n",
+      "- 4 cups (400g) powdered sugar\n",
+      "- 2 tablespoons heavy cream or milk\n",
+      "- 2 teaspoons vanilla extract\n",
+      "\n",
+      "Instructions:\n",
+      "\n",
+      "1. Preheat your oven to 350°F (175°C). Grease two 9-inch round cake pans with butter or cooking spray, then line the bottoms with parchment paper.\n",
+      "\n",
+      "2. In a medium bowl, whisk together the flour, sugar, baking powder, and salt until well combined.\n",
+      "\n",
+      "3. In a large mixing bowl, beat the softened butter until creamy. Gradually add in the sugar and continue beating until light and fluffy, about 3 minutes.\n",
+      "\n",
+      "4. Add in the eggs one at a time, beating well after each addition. Mix in the vanilla extract.\n",
+      "\n",
+      "5. With the mixer on low speed, alternate adding the flour mixture and milk, starting and ending with the flour mixture. Beat until just combined, being careful not to overmix.\n",
+      "\n",
+      "6. Divide the batter evenly between the prepared pans.\n",
+      "\n",
+      "7. Bake for 25 to 30 minutes or until a toothpick inserted into the center of each cake comes out clean.\n",
+      "\n",
+      "8. Let the cakes cool in their pans for 10 minutes, then remove them from the pans and place them onto wire racks to cool completely.\n",
+      "\n",
+      "To make the frosting (if desired):\n",
+      "\n",
+      "1. In a large mixing bowl, beat the softened butter until smooth.\n",
+      "2. Gradually add in the powdered sugar, mixing well after each addition.\n",
+      "3. Stir in the heavy cream or milk and vanilla extract. Beat on high speed until the frosting is smooth and creamy.\n",
+      "\n",
+      "Assembling the Cake:\n",
+      "\n",
+      "1. Once the cakes are cooled, place one layer on a plate or cake stand. Spread a generous amount of frosting over the top.\n",
+      "2. Place the second cake layer on top of the frosting, bottom-side up to create a flat surface.\n",
+      "3. Frost the top and sides of the cake with the remaining frosting as desired.\n",
+      "\n",
+      "Enjoy your homemade vanilla sponge cake!\n",
+      "\n",
+      "Remember, this is just a basic recipe and you can customize it by adding food coloring, different flavorings, or toppings to suit your taste. Always follow the specific instructions for more complex recipes or those that use different ingredients like chocolate or fruit.\n",
+      "assistant: To make a cake without sugar, you can substitute sugar with alternative natural sweeteners. Here's a recipe for a simple sugar-free vanilla sponge cake using honey as a sweetener:\n",
+      "\n",
+      "Ingredients:\n",
+      "- 1 and 1/2 cups (190g) all-purpose flour\n",
+      "- 3/4 cup (225g) honey (use mild-flavored honey like clover or acacia)\n",
+      "- 2 teaspoons baking powder\n",
+      "- 1/2 teaspoon salt\n",
+      "- 1/2 cup (1 stick or 113g) unsalted butter, softened\n",
+      "- 2 large eggs, room temperature\n",
+      "- 2 teaspoons pure vanilla extract\n",
+      "- 3/4 cup (180ml) whole milk, at room temperature\n",
+      "\n",
+      "Instructions:\n",
+      "\n",
+      "1. Preheat your oven to 350°F (175°C). Grease two 9-inch round cake pans with butter or cooking spray, then line the bottoms with parchment paper.\n",
+      "\n",
+      "2. In a medium bowl, whisk together the flour, baking powder, and salt.\n",
+      "\n",
+      "3. In a large mixing bowl, beat the softened butter and honey until light and fluffy, about 3-4 minutes. Honey is more liquid than sugar so it may take a little longer to incorporate.\n",
+      "\n",
+      "4. Add in the eggs one at a time, beating well after each addition. Mix in the vanilla extract.\n",
+      "\n",
+      "5. With the mixer on low speed, alternate adding the flour mixture and milk, starting and ending with the flour mixture. Beat until just combined, being careful not to overmix.\n",
+      "\n",
+      "6. Divide the batter evenly between the prepared pans.\n",
+      "\n",
+      "7. Bake for 25 to 30 minutes or until a toothpick inserted into the center of each cake comes out clean.\n",
+      "\n",
+      "8. Let the cakes cool in their pans for 10 minutes, then remove them from the pans and place them onto wire racks to cool completely.\n",
+      "\n",
+      "Note: Baking with honey can result in a moister cake that browns faster. You might need to adjust the baking time accordingly and monitor the color of the cake while it bakes.\n",
+      "\n",
+      "Keep in mind that honey is still a form of carbohydrate and has calories. For a completely sugar-free option, you could use erythritol or stevia-based sweeteners, but be aware that these will affect the texture and taste differently compared to traditional sugar or honey. Adjust the amount according to the specific sweetener’s conversion ratio to sugar, and follow the instructions provided by the manufacturer.\n",
+      "\n",
+      "For frosting, you can also create a sugar-free version using cream cheese or whipped cream sweetened with a sugar substitute, if desired.\n"
+     ]
+    }
+   ],
+   "source": [
+    "messages = [\n",
+    "    ChatMessage(\n",
+    "        role=MessageRole.SYSTEM, content=\"You are a helpful assistant.\"\n",
+    "    ),\n",
+    "    ChatMessage(role=MessageRole.USER, content=\"How to make cake?\"),\n",
+    "]\n",
+    "# first round\n",
+    "resp = dashscope_llm.chat(messages)\n",
+    "print(resp)\n",
+    "\n",
+    "# add response to messages.\n",
+    "messages.append(\n",
+    "    ChatMessage(role=MessageRole.ASSISTANT, content=resp.message.content)\n",
+    ")\n",
+    "\n",
+    "messages.append(\n",
+    "    ChatMessage(role=MessageRole.USER, content=\"How to make it without sugar\")\n",
+    ")\n",
+    "# second round\n",
+    "resp = dashscope_llm.chat(messages)\n",
+    "print(resp)"
+   ]
+  }
+ ],
+ "metadata": {
+  "colab": {
+   "name": "gemini.ipynb",
+   "toc_visible": true
+  },
+  "kernelspec": {
+   "display_name": "Python 3",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
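For quick reference outside the notebook, a condensed sketch of the four call styles demonstrated above (sync only; assumes `llama-index-llms-dashscope` is installed and `DASHSCOPE_API_KEY` is set):

```python
from llama_index.core.base.llms.types import ChatMessage, MessageRole
from llama_index.llms.dashscope import DashScope, DashScopeGenerationModels

llm = DashScope(model_name=DashScopeGenerationModels.QWEN_MAX)

# complete / stream_complete take a plain prompt.
print(llm.complete("How to make cake?"))
for chunk in llm.stream_complete("How to make cake?"):
    print(chunk.delta, end="")

# chat / stream_chat take a list of ChatMessage objects; append the
# assistant reply back onto the list for multi-round conversation.
messages = [
    ChatMessage(role=MessageRole.SYSTEM, content="You are a helpful assistant."),
    ChatMessage(role=MessageRole.USER, content="How to make cake?"),
]
print(llm.chat(messages))
```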
diff --git a/docs/examples/multi_modal/dashscope_multi_modal.ipynb b/docs/examples/multi_modal/dashscope_multi_modal.ipynb
new file mode 100644
index 0000000000..88b2a6e557
--- /dev/null
+++ b/docs/examples/multi_modal/dashscope_multi_modal.ipynb
@@ -0,0 +1,315 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "368686b4-f487-4dd4-aeff-37823976529d",
+   "metadata": {},
+   "source": [
+    "<a href=\"https://colab.research.google.com/github/run-llama/llama_index/blob/main/docs/examples/multi_modal/dashscope_multi_modal.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>\n",
+    "\n",
+    "# Multi-Modal LLM using DashScope qwen-vl model for image reasoning\n",
+    "\n",
+    "In this notebook, we show how to use DashScope qwen-vl MultiModal LLM class/abstraction for image understanding/reasoning.\n",
+    "Async is not currently supported\n",
+    "\n",
+    "We also show several functions we are now supporting for DashScope LLM:\n",
+    "* `complete` (sync): for a single prompt and list of images\n",
+    "* `chat` (sync): for multiple chat messages\n",
+    "* `stream complete` (sync): for steaming output of complete\n",
+    "* `stream chat` (sync): for steaming output of chat\n",
+    "* multi round conversation."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fc691ca8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "!pip install -U llama-index-multi-modal-llms-dashscope"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "4479bf64",
+   "metadata": {},
+   "source": [
+    "##  Use DashScope to understand Images from URLs"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5455d8c6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Set API key\n",
+    "%env DASHSCOPE_API_KEY=YOUR_DASHSCOPE_API_KEY"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "3d0d083e",
+   "metadata": {},
+   "source": [
+    "## Initialize `DashScopeMultiModal` and Load Images from URLs"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8725b6d2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from llama_index.multi_modal_llms.dashscope import (\n",
+    "    DashScopeMultiModal,\n",
+    "    DashScopeMultiModalModels,\n",
+    ")\n",
+    "\n",
+    "from llama_index.core.multi_modal_llms.generic_utils import load_image_urls\n",
+    "\n",
+    "\n",
+    "image_urls = [\n",
+    "    \"https://dashscope.oss-cn-beijing.aliyuncs.com/images/dog_and_girl.jpeg\",\n",
+    "]\n",
+    "\n",
+    "image_documents = load_image_urls(image_urls)\n",
+    "\n",
+    "dashscope_multi_modal_llm = DashScopeMultiModal(\n",
+    "    model_name=DashScopeMultiModalModels.QWEN_VL_MAX,\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "fbd9c116",
+   "metadata": {},
+   "source": [
+    "### Complete a prompt with images"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c96ab53e",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "The image captures a serene moment on a sandy beach at sunset. A woman, dressed in a blue and white plaid shirt, is seated on the ground. She is holding a treat in her hand, which is being gently taken by a dog. The dog, wearing a blue harness, is sitting next to the woman, its paw resting on her leg. The backdrop of this heartwarming scene is the vast ocean, with the sun setting in the distance, casting a warm glow over the entire landscape. The image beautifully encapsulates the bond between the woman and her dog, set against the tranquil beauty of nature.\n"
+     ]
+    }
+   ],
+   "source": [
+    "complete_response = dashscope_multi_modal_llm.complete(\n",
+    "    prompt=\"What's in the image?\",\n",
+    "    image_documents=image_documents,\n",
+    ")\n",
+    "print(complete_response)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e043cc59",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "There is a dog in Picture 1, and there is a panda in Picture 2.\n"
+     ]
+    }
+   ],
+   "source": [
+    "### Complete a prompt with multi images\n",
+    "multi_image_urls = [\n",
+    "    \"https://dashscope.oss-cn-beijing.aliyuncs.com/images/dog_and_girl.jpeg\",\n",
+    "    \"https://dashscope.oss-cn-beijing.aliyuncs.com/images/panda.jpeg\",\n",
+    "]\n",
+    "\n",
+    "multi_image_documents = load_image_urls(multi_image_urls)\n",
+    "complete_response = dashscope_multi_modal_llm.complete(\n",
+    "    prompt=\"What animals are in the pictures?\",\n",
+    "    image_documents=multi_image_documents,\n",
+    ")\n",
+    "print(complete_response)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "26ff28b6",
+   "metadata": {},
+   "source": [
+    "### Steam Complete a prompt with a bunch of images"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "eab28aa6",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "The image captures a serene moment on a sandy beach at sunset. A woman, dressed in a blue and white plaid shirt, is seated on the ground. She is holding a treat in her hand, which is being gently taken by a dog. The dog, wearing a blue harness, is sitting next to the woman, its paw resting on her leg. The backdrop of this heartwarming scene is the vast ocean, with the sun setting in the distance, casting a warm glow over the entire landscape. The image beautifully encapsulates the bond between the woman and her dog, set against the tranquil beauty of nature."
+     ]
+    }
+   ],
+   "source": [
+    "stream_complete_response = dashscope_multi_modal_llm.stream_complete(\n",
+    "    prompt=\"What's in the image?\",\n",
+    "    image_documents=image_documents,\n",
+    ")\n",
+    "\n",
+    "for r in stream_complete_response:\n",
+    "    print(r.delta, end=\"\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a6cc9d04",
+   "metadata": {},
+   "source": [
+    "### multi round conversation with chat messages"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "555bb503",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "The image shows two photos of a panda sitting on a wooden log in an enclosure. In the top photo, the panda is sitting upright with its front paws on the log, facing three crows that are perched on the log. The panda looks alert and curious, while the crows seem to be observing the panda. In the bottom photo, the panda is lying down on the log, its head resting on its front paws. One crow has landed on the ground next to the log, and it seems to be interacting with the panda. The background of the photo shows green plants and a wire fence, creating a natural and relaxed atmosphere.\n",
+      "The woman is sitting on the beach with her dog, and they are giving each other high fives. The panda and the crows are sitting together on a log, and the panda seems to be communicating with the crows.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from llama_index.core.base.llms.types import MessageRole\n",
+    "from llama_index.multi_modal_llms.dashscope.utils import (\n",
+    "    create_dashscope_multi_modal_chat_message,\n",
+    ")\n",
+    "\n",
+    "chat_message_user_1 = create_dashscope_multi_modal_chat_message(\n",
+    "    \"What's in the image?\", MessageRole.USER, image_documents\n",
+    ")\n",
+    "chat_response = dashscope_multi_modal_llm.chat([chat_message_user_1])\n",
+    "print(chat_response.message.content[0][\"text\"])\n",
+    "chat_message_assistent_1 = create_dashscope_multi_modal_chat_message(\n",
+    "    chat_response.message.content[0][\"text\"], MessageRole.ASSISTANT, None\n",
+    ")\n",
+    "chat_message_user_2 = create_dashscope_multi_modal_chat_message(\n",
+    "    \"what are they doing?\", MessageRole.USER, None\n",
+    ")\n",
+    "chat_response = dashscope_multi_modal_llm.chat(\n",
+    "    [chat_message_user_1, chat_message_assistent_1, chat_message_user_2]\n",
+    ")\n",
+    "print(chat_response.message.content[0][\"text\"])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "aa8b719f",
+   "metadata": {},
+   "source": [
+    "### Stream Chat through a list of chat messages"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c23d7140",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "The woman is sitting on the beach, holding a treat in her hand, while the dog is sitting next to her, taking the treat from her hand."
+     ]
+    }
+   ],
+   "source": [
+    "stream_chat_response = dashscope_multi_modal_llm.stream_chat(\n",
+    "    [chat_message_user_1, chat_message_assistent_1, chat_message_user_2]\n",
+    ")\n",
+    "for r in stream_chat_response:\n",
+    "    print(r.delta, end=\"\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "c8738293",
+   "metadata": {},
+   "source": [
+    "###  Use images from local files\n",
+    " Use local file:  \n",
+    "    Linux&mac file schema: file:///home/images/test.png  \n",
+    "    Windows file schema: file://D:/images/abc.png  "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5e91ec1b",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "There is a dog in Picture 1, and there is a panda in Picture 2.\n"
+     ]
+    }
+   ],
+   "source": [
+    "from llama_index.multi_modal_llms.dashscope.utils import load_local_images\n",
+    "\n",
+    "local_images = [\n",
+    "    \"file://THE_FILE_PATH1\",\n",
+    "    \"file://THE_FILE_PATH2\",\n",
+    "]\n",
+    "\n",
+    "image_documents = load_local_images(local_images)\n",
+    "chat_message_local = create_dashscope_multi_modal_chat_message(\n",
+    "    \"What animals are in the pictures?\", MessageRole.USER, image_documents\n",
+    ")\n",
+    "chat_response = dashscope_multi_modal_llm.chat([chat_message_local])\n",
+    "print(chat_response.message.content[0][\"text\"])"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
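For reference, a condensed sketch of the qwen-vl usage demonstrated above (assumes `llama-index-multi-modal-llms-dashscope` is installed and `DASHSCOPE_API_KEY` is set; the image URL is the one used in the notebook):

```python
from llama_index.core.multi_modal_llms.generic_utils import load_image_urls
from llama_index.multi_modal_llms.dashscope import (
    DashScopeMultiModal,
    DashScopeMultiModalModels,
)

image_documents = load_image_urls(
    ["https://dashscope.oss-cn-beijing.aliyuncs.com/images/dog_and_girl.jpeg"]
)
mm_llm = DashScopeMultiModal(model_name=DashScopeMultiModalModels.QWEN_VL_MAX)

# Single-shot image reasoning; stream_complete/chat/stream_chat follow
# the same pattern as the notebook cells above.
response = mm_llm.complete(
    prompt="What's in the image?", image_documents=image_documents
)
print(response)
```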
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/.gitignore b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/.gitignore
new file mode 100644
index 0000000000..990c18de22
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/.gitignore
@@ -0,0 +1,153 @@
+llama_index/_static
+.DS_Store
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+bin/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+etc/
+include/
+lib/
+lib64/
+parts/
+sdist/
+share/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+.ruff_cache
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+notebooks/
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+pyvenv.cfg
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# Jetbrains
+.idea
+modules/
+*.swp
+
+# VsCode
+.vscode
+
+# pipenv
+Pipfile
+Pipfile.lock
+
+# pyright
+pyrightconfig.json
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/BUILD b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/BUILD
new file mode 100644
index 0000000000..0896ca890d
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/BUILD
@@ -0,0 +1,3 @@
+poetry_requirements(
+    name="poetry",
+)
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/Makefile b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/Makefile
new file mode 100644
index 0000000000..b9eab05aa3
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/Makefile
@@ -0,0 +1,17 @@
+GIT_ROOT ?= $(shell git rev-parse --show-toplevel)
+
+help:	## Show all Makefile targets.
+	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}'
+
+format:	## Run code autoformatters (black).
+	pre-commit install
+	git ls-files | xargs pre-commit run black --files
+
+lint:	## Run linters: pre-commit (black, ruff, codespell) and mypy
+	pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files
+
+test:	## Run tests via pytest.
+	pytest tests
+
+watch-docs:	## Build and watch documentation.
+	sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/README.md b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/README.md
new file mode 100644
index 0000000000..da75dd907c
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/README.md
@@ -0,0 +1 @@
+# LlamaIndex Embeddings Integration: DashScope
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/BUILD b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/BUILD
new file mode 100644
index 0000000000..db46e8d6c9
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/BUILD
@@ -0,0 +1 @@
+python_sources()
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/__init__.py b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/__init__.py
new file mode 100644
index 0000000000..ccf0f8dc4f
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/__init__.py
@@ -0,0 +1,16 @@
+from llama_index.embeddings.dashscope.base import (
+    DashScopeEmbedding,
+    DashScopeBatchTextEmbeddingModels,
+    DashScopeMultiModalEmbeddingModels,
+    DashScopeTextEmbeddingModels,
+    DashScopeTextEmbeddingType,
+)
+
+
+__all__ = [
+    "DashScopeTextEmbeddingType",
+    "DashScopeTextEmbeddingModels",
+    "DashScopeBatchTextEmbeddingModels",
+    "DashScopeEmbedding",
+    "DashScopeMultiModalEmbeddingModels",
+]
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/base.py
new file mode 100644
index 0000000000..8fa2356f48
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/llama_index/embeddings/dashscope/base.py
@@ -0,0 +1,307 @@
+"""DashScope embeddings file."""
+
+import logging
+from enum import Enum
+from http import HTTPStatus
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import PrivateAttr
+
+from llama_index.core.embeddings.multi_modal_base import MultiModalEmbedding
+from llama_index.core.schema import ImageType
+
+logger = logging.getLogger(__name__)
+
+
+class DashScopeTextEmbeddingType(str, Enum):
+    """DashScope TextEmbedding text_type."""
+
+    TEXT_TYPE_QUERY = "query"
+    TEXT_TYPE_DOCUMENT = "document"
+
+
+class DashScopeTextEmbeddingModels(str, Enum):
+    """DashScope TextEmbedding models."""
+
+    TEXT_EMBEDDING_V1 = "text-embedding-v1"
+    TEXT_EMBEDDING_V2 = "text-embedding-v2"
+
+
+class DashScopeBatchTextEmbeddingModels(str, Enum):
+    """DashScope TextEmbedding models."""
+
+    TEXT_EMBEDDING_ASYNC_V1 = "text-embedding-async-v1"
+    TEXT_EMBEDDING_ASYNC_V2 = "text-embedding-async-v2"
+
+
+EMBED_MAX_INPUT_LENGTH = 2048
+EMBED_MAX_BATCH_SIZE = 25
+
+
+class DashScopeMultiModalEmbeddingModels(str, Enum):
+    """DashScope MultiModalEmbedding models."""
+
+    MULTIMODAL_EMBEDDING_ONE_PEACE_V1 = "multimodal-embedding-one-peace-v1"
+
+
+def get_text_embedding(
+    model: str,
+    text: Union[str, List[str]],
+    api_key: Optional[str] = None,
+    **kwargs: Any,
+) -> List[List[float]]:
+    """Call DashScope text embedding.
+       ref: https://help.aliyun.com/zh/dashscope/developer-reference/text-embedding-api-details.
+
+    Args:
+        model (str): The `DashScopeTextEmbeddingModels` model name.
+        text (Union[str, List[str]]): Text or list of texts to embed.
+
+    Raises:
+        ImportError: If the dashscope package is not installed.
+
+    Returns:
+        List[List[float]]: The list of embedding results; an empty list on failure.
+    """
+    try:
+        import dashscope
+    except ImportError:
+        raise ImportError("DashScope requires `pip install dashscope`.")
+    if isinstance(text, str):
+        text = [text]
+    embedding_results = []
+    response = dashscope.TextEmbedding.call(
+        model=model, input=text, api_key=api_key, **kwargs
+    )
+    if response.status_code == HTTPStatus.OK:
+        for emb in response.output["embeddings"]:
+            embedding_results.append(emb["embedding"])
+    else:
+        logger.error("Calling TextEmbedding failed, details: %s" % response)
+
+    return embedding_results
+
+
+def get_batch_text_embedding(
+    model: str, url: str, api_key: Optional[str] = None, **kwargs: Any
+) -> Optional[str]:
+    """Call DashScope batch text embedding.
+
+    Args:
+        model (str): The `DashScopeBatchTextEmbeddingModels` model name.
+        url (str): The URL of a file containing lines of text to embed.
+
+    Raises:
+        ImportError: If the dashscope package is not installed.
+
+    Returns:
+        Optional[str]: The URL of the embedding result, or None on failure. Format ref:
+        https://help.aliyun.com/zh/dashscope/developer-reference/text-embedding-async-api-details
+    """
+    try:
+        import dashscope
+    except ImportError:
+        raise ImportError("DashScope requires `pip install dashscope`.")
+    response = dashscope.BatchTextEmbedding.call(
+        model=model, url=url, api_key=api_key, **kwargs
+    )
+    if response.status_code == HTTPStatus.OK:
+        return response.output["url"]
+    else:
+        logger.error("Calling BatchTextEmbedding failed, details: %s" % response)
+        return None
+
+
+def get_multimodal_embedding(
+    model: str, input: list, api_key: Optional[str] = None, **kwargs: Any
+) -> List[float]:
+    """Call DashScope multimodal embedding.
+       ref: https://help.aliyun.com/zh/dashscope/developer-reference/one-peace-multimodal-embedding-api-details.
+
+    Args:
+        model (str): The `DashScopeMultiModalEmbeddingModels` model name.
+        input (list): The inputs to embed, e.g.:
+             [{'factor': 1, 'text': '你好'},
+             {'factor': 2, 'audio': 'https://dashscope.oss-cn-beijing.aliyuncs.com/audios/cow.flac'},
+             {'factor': 3, 'image': 'https://dashscope.oss-cn-beijing.aliyuncs.com/images/256_1.png'}]
+
+    Raises:
+        ImportError: If the dashscope package is not installed.
+
+    Returns:
+        List[float]: The embedding result; an empty list on failure.
+    """
+    try:
+        import dashscope
+    except ImportError:
+        raise ImportError("DashScope requires `pip install dashscope`.")
+    response = dashscope.MultiModalEmbedding.call(
+        model=model, input=input, api_key=api_key, **kwargs
+    )
+    if response.status_code == HTTPStatus.OK:
+        return response.output["embedding"]
+    else:
+        logger.error("Calling MultiModalEmbedding failed, details: %s" % response)
+        return []
+
+
+class DashScopeEmbedding(MultiModalEmbedding):
+    """DashScope class for text embedding.
+
+    Args:
+        model_name (str): Model name for embedding.
+            Defaults to DashScopeTextEmbeddingModels.TEXT_EMBEDDING_V2.
+                Options are:
+
+                - DashScopeTextEmbeddingModels.TEXT_EMBEDDING_V1
+                - DashScopeTextEmbeddingModels.TEXT_EMBEDDING_V2
+        text_type (str): The input text type, one of ['query', 'document'].
+            For asymmetric tasks such as retrieval, it is recommended to
+            distinguish between query text ("query") and corpus text
+            ("document") to achieve better retrieval results. Symmetric
+            tasks such as clustering and classification do not need a
+            special setting; the system default value "document" can
+            be used.
+        api_key (str): The DashScope API key.
+    """
+
+    _api_key: Optional[str] = PrivateAttr()
+    _text_type: Optional[str] = PrivateAttr()
+
+    def __init__(
+        self,
+        model_name: str = DashScopeTextEmbeddingModels.TEXT_EMBEDDING_V2,
+        text_type: str = "document",
+        api_key: Optional[str] = None,
+        **kwargs: Any,
+    ) -> None:
+        self._api_key = api_key
+        self._text_type = text_type
+        super().__init__(
+            model_name=model_name,
+            **kwargs,
+        )
+
+    @classmethod
+    def class_name(cls) -> str:
+        return "DashScopeEmbedding"
+
+    def _get_query_embedding(self, query: str) -> List[float]:
+        """Get query embedding."""
+        emb = get_text_embedding(
+            self.model_name,
+            query,
+            api_key=self._api_key,
+            text_type=self._text_type,
+        )
+        if len(emb) > 0:
+            return emb[0]
+        else:
+            return []
+
+    def _get_text_embedding(self, text: str) -> List[float]:
+        """Get text embedding."""
+        emb = get_text_embedding(
+            self.model_name,
+            text,
+            api_key=self._api_key,
+            text_type=self._text_type,
+        )
+        if len(emb) > 0:
+            return emb[0]
+        else:
+            return []
+
+    def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]:
+        """Get text embeddings."""
+        return get_text_embedding(
+            self.model_name,
+            texts,
+            api_key=self._api_key,
+            text_type=self._text_type,
+        )
+
+    # TODO: use proper async methods
+    async def _aget_text_embedding(self, text: str) -> List[float]:
+        """Get text embedding."""
+        return self._get_text_embedding(text)
+
+    # TODO: use proper async methods
+    async def _aget_query_embedding(self, query: str) -> List[float]:
+        """Get query embedding."""
+        return self._get_query_embedding(query)
+
+    def get_batch_query_embedding(self, embedding_file_url: str) -> Optional[str]:
+        """Get batch query embeddings.
+
+        Args:
+            embedding_file_url (str): The URL of a file containing lines of text to embed.
+
+        Returns:
+            Optional[str]: The URL of the embedding result, or None on failure. Format ref:
+                 https://help.aliyun.com/zh/dashscope/developer-reference/text-embedding-async-api-details.
+        """
+        return get_batch_text_embedding(
+            self.model_name,
+            embedding_file_url,
+            api_key=self._api_key,
+            text_type=self._text_type,
+        )
+
+    def get_batch_text_embedding(self, embedding_file_url: str) -> Optional[str]:
+        """Get batch text embeddings.
+
+        Args:
+            embedding_file_url (str): The URL of a file containing lines of text to embed.
+
+        Returns:
+            Optional[str]: The URL of the embedding result, or None on failure. Format ref:
+                 https://help.aliyun.com/zh/dashscope/developer-reference/text-embedding-async-api-details.
+        """
+        return get_batch_text_embedding(
+            self.model_name,
+            embedding_file_url,
+            api_key=self._api_key,
+            text_type=self._text_type,
+        )
+
+    def _get_image_embedding(self, img_file_path: ImageType) -> List[float]:
+        """
+        Embed the input image synchronously.
+        """
+        input = [{"image": img_file_path}]
+        return get_multimodal_embedding(
+            self.model_name, input=input, api_key=self._api_key
+        )
+
+    async def _aget_image_embedding(self, img_file_path: ImageType) -> List[float]:
+        """
+        Embed the input image asynchronously.
+
+        """
+        return self._get_image_embedding(img_file_path=img_file_path)
+
+    def get_multimodal_embedding(
+        self, input: List[Dict], auto_truncation: bool = False
+    ) -> List[float]:
+        """Call DashScope multimodal embedding.
+        ref: https://help.aliyun.com/zh/dashscope/developer-reference/one-peace-multimodal-embedding-api-details.
+
+        Args:
+            input (List[Dict]): The inputs to embed, e.g.:
+                [{'factor': 1, 'text': '你好'},
+                {'factor': 2, 'audio': 'https://dashscope.oss-cn-beijing.aliyuncs.com/audios/cow.flac'},
+                {'factor': 3, 'image': 'https://dashscope.oss-cn-beijing.aliyuncs.com/images/256_1.png'}]
+
+        Raises:
+            ImportError: If the dashscope package is not installed.
+
+        Returns:
+            List[float]: The embedding result
+        """
+        return get_multimodal_embedding(
+            self.model_name,
+            input=input,
+            api_key=self._api_key,
+            auto_truncation=auto_truncation,
+        )
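A hedged usage sketch for the batch and multimodal entry points defined in `base.py` above (the input file URL is a hypothetical placeholder; `DASHSCOPE_API_KEY` is assumed to be set):

```python
from llama_index.embeddings.dashscope import (
    DashScopeBatchTextEmbeddingModels,
    DashScopeEmbedding,
    DashScopeMultiModalEmbeddingModels,
)

# Batch text embedding: submit a file of lines, get back a result URL
# (or None on failure). The input URL below is a placeholder.
batch_embedder = DashScopeEmbedding(
    model_name=DashScopeBatchTextEmbeddingModels.TEXT_EMBEDDING_ASYNC_V2,
)
result_url = batch_embedder.get_batch_text_embedding(
    embedding_file_url="https://example.com/texts_to_embed.txt"
)

# Multimodal embedding: weighted text/image/audio inputs, one vector out.
mm_embedder = DashScopeEmbedding(
    model_name=DashScopeMultiModalEmbeddingModels.MULTIMODAL_EMBEDDING_ONE_PEACE_V1,
)
vector = mm_embedder.get_multimodal_embedding(
    input=[
        {"factor": 1, "text": "你好"},
        {
            "factor": 2,
            "image": "https://dashscope.oss-cn-beijing.aliyuncs.com/images/256_1.png",
        },
    ]
)
```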
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/pyproject.toml
new file mode 100644
index 0000000000..7a9d41fd36
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/pyproject.toml
@@ -0,0 +1,55 @@
+[build-system]
+build-backend = "poetry.core.masonry.api"
+requires = ["poetry-core"]
+
+[tool.codespell]
+check-filenames = true
+check-hidden = true
+# Feel free to un-skip examples, and experimental, you will just need to
+# work through many typos (--write-changes and --interactive will help)
+skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb"
+
+[tool.llamahub]
+classes = ["DashScopeBatchTextEmbeddingModels", "DashScopeEmbedding", "DashScopeMultiModalEmbeddingModels", "DashScopeTextEmbeddingModels", "DashScopeTextEmbeddingType"]
+contains_example = false
+import_path = "llama_index.embeddings.dashscope"
+
+[tool.mypy]
+disallow_untyped_defs = true
+# Remove venv skip when integrated with pre-commit
+exclude = ["_static", "build", "examples", "notebooks", "venv"]
+ignore_missing_imports = true
+python_version = "3.8"
+
+[tool.poetry]
+authors = ["Your Name <you@example.com>"]
+description = "llama-index embeddings dashscope integration"
+license = "MIT"
+name = "llama-index-embeddings-dashscope"
+packages = [{include = "llama_index/"}]
+readme = "README.md"
+version = "0.1.0"
+
+[tool.poetry.dependencies]
+python = ">=3.8.1,<4.0"
+llama-index-core = "^0.10.0"
+dashscope = ">1.10.0"
+
+[tool.poetry.group.dev.dependencies]
+black = {extras = ["jupyter"], version = "<=23.9.1,>=23.7.0"}
+codespell = {extras = ["toml"], version = ">=v2.2.6"}
+ipython = "8.10.0"
+jupyter = "^1.0.0"
+mypy = "0.991"
+pre-commit = "3.2.0"
+pylint = "2.15.10"
+pytest = "7.2.1"
+pytest-mock = "3.11.1"
+ruff = "0.0.292"
+tree-sitter-languages = "^1.8.0"
+types-Deprecated = ">=0.1.0"
+types-PyYAML = "^6.0.12.12"
+types-protobuf = "^4.24.0.4"
+types-redis = "4.5.5.0"
+types-requests = "2.28.11.8"  # TODO: unpin when mypy>0.991
+types-setuptools = "67.1.0.0"
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/BUILD b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/BUILD
new file mode 100644
index 0000000000..dabf212d7e
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/BUILD
@@ -0,0 +1 @@
+python_tests()
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/__init__.py b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/test_embeddings_dashscope.py b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/test_embeddings_dashscope.py
new file mode 100644
index 0000000000..1907cbfdf2
--- /dev/null
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-dashscope/tests/test_embeddings_dashscope.py
@@ -0,0 +1,7 @@
+from llama_index.core.embeddings.multi_modal_base import MultiModalEmbedding
+from llama_index.embeddings.dashscope import DashScopeEmbedding
+
+
+def test_dashscope_embedding_class():
+    names_of_base_classes = [b.__name__ for b in DashScopeEmbedding.__mro__]
+    assert MultiModalEmbedding.__name__ in names_of_base_classes
-- 
GitLab