diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-fastembed/llama_index/embeddings/fastembed/base.py b/llama-index-integrations/embeddings/llama-index-embeddings-fastembed/llama_index/embeddings/fastembed/base.py
index f0785ba06c6e3e19dce2f5cd9a79f8fc33a6d52a..9db6a8c30478698691cc78e63ed17d68fb278626 100644
--- a/llama-index-integrations/embeddings/llama-index-embeddings-fastembed/llama_index/embeddings/fastembed/base.py
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-fastembed/llama_index/embeddings/fastembed/base.py
@@ -4,7 +4,7 @@ import numpy as np
 from llama_index.core.base.embeddings.base import BaseEmbedding
 from llama_index.core.bridge.pydantic import Field, PrivateAttr
 
-from fastembed.embedding import FlagEmbedding
+from fastembed import TextEmbedding
 
 
 class FastEmbedEmbedding(BaseEmbedding):
@@ -51,10 +51,8 @@ class FastEmbedEmbedding(BaseEmbedding):
 
     doc_embed_type: Literal["default", "passage"] = Field(
         "default",
-        description="Type of embedding to use for documents.\n"
-        "'default': Uses FastEmbed's default embedding method.\n"
-        "'passage': Prefixes the text with 'passage' before embedding.\n"
-        "Defaults to 'default'.",
+        description="Type of embedding method to use for documents.\n"
+        "Available options are 'default' and 'passage'.",
     )
 
     _model: Any = PrivateAttr()
@@ -78,7 +76,7 @@ class FastEmbedEmbedding(BaseEmbedding):
             doc_embed_type=doc_embed_type,
         )
 
-        self._model = FlagEmbedding(
+        self._model = TextEmbedding(
             model_name=model_name,
             max_length=max_length,
             cache_dir=cache_dir,
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-fastembed/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-fastembed/pyproject.toml
index a962096f48c8f0c85db520db2a140a1e91ff0c5f..e43bf035ece9e3b5d25ae58b45f754f9979908aa 100644
--- a/llama-index-integrations/embeddings/llama-index-embeddings-fastembed/pyproject.toml
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-fastembed/pyproject.toml
@@ -32,7 +32,7 @@ version = "0.1.3"
 [tool.poetry.dependencies]
 python = ">=3.8.1,<3.12"
 llama-index-core = "^0.10.11.post1"
-fastembed = "^0.1.3"
+fastembed = "^0.2.2"
 
 [tool.poetry.group.dev.dependencies]
 ipython = "8.10.0"
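
For context, a minimal usage sketch of the `TextEmbedding` class that replaces `FlagEmbedding` under fastembed 0.2.x, followed by the integration entry point it backs. The model name and sample texts are illustrative only, not part of the patch:

```python
from fastembed import TextEmbedding

# TextEmbedding is the public entry point in fastembed >= 0.2
model = TextEmbedding(model_name="BAAI/bge-small-en-v1.5")

# embed() yields one numpy vector per input text
vectors = list(model.embed(["FastEmbed is a lightweight embedding library."]))
print(len(vectors), vectors[0].shape)
```

Through the LlamaIndex wrapper this patch updates, the call path stays the same as before the migration:

```python
from llama_index.embeddings.fastembed import FastEmbedEmbedding

embed_model = FastEmbedEmbedding(model_name="BAAI/bge-small-en-v1.5")
embedding = embed_model.get_text_embedding("hello world")
```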