diff --git a/docs/docs/community/integrations/vector_stores.md b/docs/docs/community/integrations/vector_stores.md
index 9c9abeaaa03012eb91faf79bdcdaa1f7e820cec8..04f5c27984c03e6d54a46af8ed00a1e2fd98d203 100644
--- a/docs/docs/community/integrations/vector_stores.md
+++ b/docs/docs/community/integrations/vector_stores.md
@@ -36,6 +36,7 @@ as the storage backend for `VectorStoreIndex`.
 - Qdrant (`QdrantVectorStore`) [Installation](https://qdrant.tech/documentation/install/) [Python Client](https://qdrant.tech/documentation/install/#python-client)
 - LanceDB (`LanceDBVectorStore`) [Installation/Quickstart](https://lancedb.github.io/lancedb/basic/)
 - Redis (`RedisVectorStore`). [Installation](https://redis.io/docs/latest/operate/oss_and_stack/install/install-stack/).
+- Relyt (`RelytVectorStore`). [Quickstart](https://docs.relyt.cn/docs/vector-engine/).
 - Supabase (`SupabaseVectorStore`). [Quickstart](https://supabase.github.io/vecs/api/).
 - TiDB (`TiDBVectorStore`). [Quickstart](../../examples/vector_stores/TiDBVector.ipynb). [Installation](https://tidb.cloud/ai). [Python Client](https://github.com/pingcap/tidb-vector-python).
 - TimeScale (`TimescaleVectorStore`). [Installation](https://github.com/timescale/python-vector).
diff --git a/docs/docs/examples/vector_stores/RelytDemo.ipynb b/docs/docs/examples/vector_stores/RelytDemo.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..cd21f53486747a9237c1a16d415d0bd1a6f47c3c
--- /dev/null
+++ b/docs/docs/examples/vector_stores/RelytDemo.ipynb
@@ -0,0 +1,273 @@
+{
+ "cells": [
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "307804a3-c02b-4a57-ac0d-172c30ddc851",
+   "metadata": {},
+   "source": [
+    "# Relyt\n",
+    "\n",
+    "<a href=\"https://colab.research.google.com/github/run-llama/llama_index/blob/main/docs/docs/examples/vector_stores/RelytDemo.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "36be66bf",
+   "metadata": {},
+   "source": [
+    "First, install the required dependencies:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a094740d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%pip install llama-index-vector-stores-relyt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6807106d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%pip install llama-index \"pgvecto_rs[sdk]\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "6e9642d8-d3aa-49f0-b8e4-4612a716e21f",
+   "metadata": {},
+   "source": [
+    "Then start Relyt as described in the [official documentation](https://docs.relyt.cn/docs/vector-engine/use/):"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a6fe902c-3b17-427c-b039-2d77c597c6c1",
+   "metadata": {},
+   "source": [
+    "Setup the logger."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d48af8e1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import logging\n",
+    "import os\n",
+    "import sys\n",
+    "\n",
+    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "f7010b1d-d1bb-4f08-9309-a328bb4ea396",
+   "metadata": {},
+   "source": [
+    "#### Creating a pgvecto_rs client"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0ce3143d-198c-4dd2-8e5a-c5cdf94f017a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from pgvecto_rs.sdk import PGVectoRs\n",
+    "\n",
+    "URL = \"postgresql+psycopg://{username}:{password}@{host}:{port}/{db_name}\".format(\n",
+    "    port=os.getenv(\"RELYT_PORT\", \"5432\"),\n",
+    "    host=os.getenv(\"RELYT_HOST\", \"localhost\"),\n",
+    "    username=os.getenv(\"RELYT_USER\", \"postgres\"),\n",
+    "    password=os.getenv(\"RELYT_PASS\", \"mysecretpassword\"),\n",
+    "    db_name=os.getenv(\"RELYT_NAME\", \"postgres\"),\n",
+    ")\n",
+    "\n",
+    "client = PGVectoRs(\n",
+    "    db_url=URL,\n",
+    "    collection_name=\"example\",\n",
+    "    dimension=1536,  # Using OpenAI’s text-embedding-ada-002\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "c3d7ac82-0ba6-4a32-8dad-3234e42b660a",
+   "metadata": {},
+   "source": [
+    "#### Setup OpenAI"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4ad14111-0bbb-4c62-906d-6d6253e0cdee",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "\n",
+    "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\""
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "8ee4473a-094f-4d0a-a825-e1213db07240",
+   "metadata": {},
+   "source": [
+    "#### Load documents, build the RelytVectorStore and VectorStoreIndex"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0a2bcc07",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from IPython.display import Markdown, display\n",
+    "\n",
+    "from llama_index.core import SimpleDirectoryReader, VectorStoreIndex\n",
+    "from llama_index.vector_stores.relyt import RelytVectorStore"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "7d782f76",
+   "metadata": {},
+   "source": [
+    "Download Data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5104674e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "!mkdir -p 'data/paul_graham/'\n",
+    "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "68cbd239-880e-41a3-98d8-dbb3fab55431",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# load documents\n",
+    "documents = SimpleDirectoryReader(\"./data/paul_graham\").load_data()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ba1558b3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# initialize without metadata filter\n",
+    "from llama_index.core import StorageContext\n",
+    "\n",
+    "vector_store = RelytVectorStore(client=client, collection_name=\"example\")\n",
+    "storage_context = StorageContext.from_defaults(vector_store=vector_store)\n",
+    "index = VectorStoreIndex.from_documents(\n",
+    "    documents, storage_context=storage_context\n",
+    ")"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "04304299-fc3e-40a0-8600-f50c3292767e",
+   "metadata": {},
+   "source": [
+    "#### Query Index"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "35369eda",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "INFO:httpx:HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n",
+      "HTTP Request: POST https://api.openai.com/v1/embeddings \"HTTP/1.1 200 OK\"\n",
+      "INFO:httpx:HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+      "HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n"
+     ]
+    }
+   ],
+   "source": [
+    "# set Logging to DEBUG for more detailed outputs\n",
+    "query_engine = index.as_query_engine()\n",
+    "response = query_engine.query(\"What did the author do growing up?\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bedbb693-725f-478f-be26-fa7180ea38b2",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/markdown": [
+       "<b>The author, growing up, worked on writing and programming. They wrote short stories and also tried writing programs on an IBM 1401 computer. They later got a microcomputer and started programming more extensively, writing simple games and a word processor.</b>"
+      ],
+      "text/plain": [
+       "<IPython.core.display.Markdown object>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "display(Markdown(f\"<b>{response}</b>\"))"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/llama-index-cli/llama_index/cli/upgrade/mappings.json b/llama-index-cli/llama_index/cli/upgrade/mappings.json
index fd3a01bbc8ca439e84a1826045b46f63cb8685e7..c4bcfce5306576a6b91426ac30a7d4136f9b8f7d 100644
--- a/llama-index-cli/llama_index/cli/upgrade/mappings.json
+++ b/llama-index-cli/llama_index/cli/upgrade/mappings.json
@@ -422,6 +422,7 @@
   "ChatGPTRetrievalPluginClient": "llama_index.vector_stores.chatgpt_plugin",
   "TairVectorStore": "llama_index.vector_stores.tair",
   "RedisVectorStore": "llama_index.vector_stores.redis",
+  "RelytVectorStore": "llama_index.vector_stores.relyt",
   "set_google_config": "llama_index.vector_stores.google",
   "GoogleVectorStore": "llama_index.vector_stores.google",
   "MetalVectorStore": "llama_index.vector_stores.metal",
diff --git a/llama-index-core/llama_index/core/command_line/mappings.json b/llama-index-core/llama_index/core/command_line/mappings.json
index fd3a01bbc8ca439e84a1826045b46f63cb8685e7..c4bcfce5306576a6b91426ac30a7d4136f9b8f7d 100644
--- a/llama-index-core/llama_index/core/command_line/mappings.json
+++ b/llama-index-core/llama_index/core/command_line/mappings.json
@@ -422,6 +422,7 @@
   "ChatGPTRetrievalPluginClient": "llama_index.vector_stores.chatgpt_plugin",
   "TairVectorStore": "llama_index.vector_stores.tair",
   "RedisVectorStore": "llama_index.vector_stores.redis",
+  "RelytVectorStore": "llama_index.vector_stores.relyt",
   "set_google_config": "llama_index.vector_stores.google",
   "GoogleVectorStore": "llama_index.vector_stores.google",
   "MetalVectorStore": "llama_index.vector_stores.metal",
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/.gitignore b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..990c18de229088f55c6c514fd0f2d49981d1b0e7
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/.gitignore
@@ -0,0 +1,153 @@
+llama_index/_static
+.DS_Store
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+bin/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+etc/
+include/
+lib/
+lib64/
+parts/
+sdist/
+share/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+.ruff_cache
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+notebooks/
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+pyvenv.cfg
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# Jetbrains
+.idea
+modules/
+*.swp
+
+# VsCode
+.vscode
+
+# pipenv
+Pipfile
+Pipfile.lock
+
+# pyright
+pyrightconfig.json
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..0896ca890d8bffd60a44fa824f8d57fecd73ee53
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/BUILD
@@ -0,0 +1,3 @@
+poetry_requirements(
+    name="poetry",
+)
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/Makefile b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..b9eab05aa370629a4a3de75df3ff64cd53887b68
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/Makefile
@@ -0,0 +1,17 @@
+GIT_ROOT ?= $(shell git rev-parse --show-toplevel)
+
+help:	## Show all Makefile targets.
+	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-30s\033[0m %s\n", $$1, $$2}'
+
+format:	## Run code autoformatters (black).
+	pre-commit install
+	git ls-files | xargs pre-commit run black --files
+
+lint:	## Run linters: pre-commit (black, ruff, codespell) and mypy
+	pre-commit install && git ls-files | xargs pre-commit run --show-diff-on-failure --files
+
+test:	## Run tests via pytest.
+	pytest tests
+
+watch-docs:	## Build and watch documentation.
+	sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/README.md b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..a079186ddb43f709f4e096c21b688619eae30dda
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/README.md
@@ -0,0 +1 @@
+# LlamaIndex Vector_Stores Integration: Relyt
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/llama_index/vector_stores/relyt/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/llama_index/vector_stores/relyt/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..db46e8d6c978c67e301dd6c47bee08c1b3fd141c
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/llama_index/vector_stores/relyt/BUILD
@@ -0,0 +1 @@
+python_sources()
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/llama_index/vector_stores/relyt/__init__.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/llama_index/vector_stores/relyt/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae2884c520b5956dc580ee4ad882aeec9959ff74
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/llama_index/vector_stores/relyt/__init__.py
@@ -0,0 +1,3 @@
+from llama_index.vector_stores.relyt.base import RelytVectorStore
+
+__all__ = ["RelytVectorStore"]
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/llama_index/vector_stores/relyt/base.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/llama_index/vector_stores/relyt/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..12ff4fa3c0b2951e8965c3717103f3f027b1c957
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/llama_index/vector_stores/relyt/base.py
@@ -0,0 +1,153 @@
+import logging
+from typing import Any, List
+
+from llama_index.core.bridge.pydantic import PrivateAttr
+from llama_index.core.schema import BaseNode, MetadataMode
+from llama_index.core.vector_stores.types import (
+    BasePydanticVectorStore,
+    VectorStoreQuery,
+    VectorStoreQueryResult,
+)
+from llama_index.core.vector_stores.utils import (
+    metadata_dict_to_node,
+    node_to_metadata_dict,
+)
+from pgvecto_rs.sdk import PGVectoRs, Record
+from pgvecto_rs.sdk.filters import meta_contains
+from sqlalchemy import text
+
+logger = logging.getLogger(__name__)
+import_err_msg = (
+    '`pgvecto_rs.sdk` package not found, please run `pip install "pgvecto_rs[sdk]"`'
+)
+
+
+class RelytVectorStore(BasePydanticVectorStore):
+    """Relyt Vector Store.
+
+    Examples:
+        `pip install llama-index-vector-stores-relyt`
+
+        ```python
+        from llama_index.vector_stores.relyt import RelytVectorStore
+
+        # Setup relyt client
+        from pgvecto_rs.sdk import PGVectoRs
+        import os
+
+        URL = "postgresql+psycopg://{username}:{password}@{host}:{port}/{db_name}".format(
+            port=os.getenv("RELYT_PORT", "5432"),
+            host=os.getenv("RELYT_HOST", "localhost"),
+            username=os.getenv("RELYT_USER", "postgres"),
+            password=os.getenv("RELYT_PASS", "mysecretpassword"),
+            db_name=os.getenv("RELYT_NAME", "postgres"),
+        )
+
+        client = PGVectoRs(
+            db_url=URL,
+            collection_name="example",
+            dimension=1536,  # Using OpenAI’s text-embedding-ada-002
+        )
+
+        # Initialize RelytVectorStore
+        vector_store = RelytVectorStore(client=client, collection_name="example")
+        ```
+    """
+
+    stores_text = True
+
+    _client: "PGVectoRs" = PrivateAttr()
+    _collection_name: str = PrivateAttr()
+
+    def __init__(self, client: "PGVectoRs", collection_name: str) -> None:
+        self._client: PGVectoRs = client
+        self._collection_name = collection_name
+        self.init_index()
+        super().__init__()
+
+    @classmethod
+    def class_name(cls) -> str:
+        return "RelytStore"
+
+    def init_index(self):
+        index_name = f"idx_{self._collection_name}_embedding"
+        with self._client._engine.connect() as conn:
+            with conn.begin():
+                index_query = text(
+                    f"""
+                        SELECT 1
+                        FROM pg_indexes
+                        WHERE indexname = '{index_name}';
+                    """
+                )
+                result = conn.execute(index_query).scalar()
+                if not result:
+                    index_statement = text(
+                        f"""
+                            CREATE INDEX {index_name}
+                            ON collection_{self._collection_name}
+                            USING vectors (embedding vector_l2_ops)
+                            WITH (options = $$
+                            optimizing.optimizing_threads = 30
+                            segment.max_growing_segment_size = 2000
+                            segment.max_sealed_segment_size = 30000000
+                            [indexing.hnsw]
+                            m=30
+                            ef_construction=500
+                            $$);
+                        """
+                    )
+                    conn.execute(index_statement)
+
+    @property
+    def client(self) -> Any:
+        return self._client
+
+    def add(
+        self,
+        nodes: List[BaseNode],
+    ) -> List[str]:
+        records = [
+            Record(
+                id=node.id_,
+                text=node.get_content(metadata_mode=MetadataMode.NONE),
+                meta=node_to_metadata_dict(node, remove_text=True),
+                embedding=node.get_embedding(),
+            )
+            for node in nodes
+        ]
+
+        self._client.insert(records)
+        return [node.id_ for node in nodes]
+
+    def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None:
+        self._client.delete(meta_contains({"ref_doc_id": ref_doc_id}))
+
+    def drop(self) -> None:
+        self._client.drop()
+
+    # TODO: the more filter type(le, ne, ge ...) will add later, after the base api supported,
+    #  now only support eq filter for meta information
+    def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult:
+        results = self._client.search(
+            embedding=query.query_embedding,
+            top_k=query.similarity_top_k,
+            filter=(
+                meta_contains(
+                    {pair.key: pair.value for pair in query.filters.legacy_filters()}
+                )
+                if query.filters is not None
+                else None
+            ),
+        )
+
+        nodes = [
+            metadata_dict_to_node(record.meta, text=record.text)
+            for record, _ in results
+        ]
+
+        return VectorStoreQueryResult(
+            nodes=nodes,
+            similarities=[score for _, score in results],
+            ids=[str(record.id) for record, _ in results],
+        )
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/pyproject.toml b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..dd00ab1352b4770d29a2752ac02f0dc4a49d351d
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/pyproject.toml
@@ -0,0 +1,64 @@
+[build-system]
+build-backend = "poetry.core.masonry.api"
+requires = ["poetry-core"]
+
+[tool.codespell]
+check-filenames = true
+check-hidden = true
+skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb"
+
+[tool.llamahub]
+contains_example = false
+import_path = "llama_index.vector_stores.relyt"
+
+[tool.llamahub.class_authors]
+RelytVectorStore = "llama-index"
+
+[tool.mypy]
+disallow_untyped_defs = true
+exclude = ["_static", "build", "examples", "notebooks", "venv"]
+ignore_missing_imports = true
+python_version = "3.8"
+
+[tool.poetry]
+authors = ["Your Name <you@example.com>"]
+description = "llama-index vector_stores relyt integration"
+exclude = ["**/BUILD"]
+license = "MIT"
+name = "llama-index-vector-stores-relyt"
+readme = "README.md"
+version = "0.1.0"
+
+[tool.poetry.dependencies]
+python = ">=3.8.1,<4.0"
+llama-index-core = "^0.10.1"
+pgvecto-rs = {extras = ["sdk"], version = "^0.1.4"}
+sqlalchemy = ">=1.3.12,<3"
+
+[tool.poetry.group.dev.dependencies]
+ipython = "8.10.0"
+jupyter = "^1.0.0"
+mypy = "0.991"
+pre-commit = "3.2.0"
+pylint = "2.15.10"
+pytest = "7.2.1"
+pytest-mock = "3.11.1"
+ruff = "0.0.292"
+tree-sitter-languages = "^1.8.0"
+types-Deprecated = ">=0.1.0"
+types-PyYAML = "^6.0.12.12"
+types-protobuf = "^4.24.0.4"
+types-redis = "4.5.5.0"
+types-requests = "2.28.11.8"
+types-setuptools = "67.1.0.0"
+
+[tool.poetry.group.dev.dependencies.black]
+extras = ["jupyter"]
+version = "<=23.9.1,>=23.7.0"
+
+[tool.poetry.group.dev.dependencies.codespell]
+extras = ["toml"]
+version = ">=v2.2.6"
+
+[[tool.poetry.packages]]
+include = "llama_index/"
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/tests/BUILD b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/tests/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..dabf212d7e7162849c24a733909ac4f645d75a31
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/tests/BUILD
@@ -0,0 +1 @@
+python_tests()
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/tests/__init__.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/tests/test_vector_stores_relyt.py b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/tests/test_vector_stores_relyt.py
new file mode 100644
index 0000000000000000000000000000000000000000..e57a2a573d3057c5034887b21caecf1db51b934a
--- /dev/null
+++ b/llama-index-integrations/vector_stores/llama-index-vector-stores-relyt/tests/test_vector_stores_relyt.py
@@ -0,0 +1,7 @@
+from llama_index.core.vector_stores.types import BasePydanticVectorStore
+from llama_index.vector_stores.relyt import RelytVectorStore
+
+
+def test_class():
+    names_of_base_classes = [b.__name__ for b in RelytVectorStore.__mro__]
+    assert BasePydanticVectorStore.__name__ in names_of_base_classes