diff --git a/CHANGELOG.md b/CHANGELOG.md
index 96f5a8a1e7713325b18f7bcfda28fb997c68c990..f5239c18334565b87cf77dad404c9d86b6633d0a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,4 @@
-# Changelog
+# ChangeLog
 
 ## [Unreleased]
 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 431cdbe45a7d3b23d683dd74111f08b95d7fb4e1..caf8a9ad9795f13dc3f62841794b03a1416b08b1 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,4 +1,4 @@
-# 💡 Contributing to LlamaIndex
+# Contributing to LlamaIndex
 Interested in contributing to LlamaIndex? Here's how to get started! 
 
 ## Contribution Guideline
@@ -17,7 +17,7 @@ Also, join our Discord for ideas and discussions: https://discord.gg/dGcwcsnxhU.
 
 ### 1. 🆕 Extend Core Modules
 The most impactful way to contribute to LlamaIndex is extending our core modules:
-![LlamaIndex modules](docs/_static/contribution/contrib.png)
+![LlamaIndex modules](https://github.com/jerryjliu/llama_index/raw/main/docs/_static/contribution/contrib.png)
 
 We welcome contributions in _all_ modules shown above.
 So far, we have implemented a core set of functionalities for each.
diff --git a/docs/README.md b/docs/DOCS_README.md
similarity index 98%
rename from docs/README.md
rename to docs/DOCS_README.md
index bbcf904b8a7d30fef3501aa03cde6770868a799b..4a7df91f0acf9a51e5f123e147fd874bac68c34e 100644
--- a/docs/README.md
+++ b/docs/DOCS_README.md
@@ -1,4 +1,4 @@
-# LlamaIndex Documentation
+# Documentation Guide
 
 ## A guide for docs contributors
 
diff --git a/docs/conf.py b/docs/conf.py
index b0bc9352181f03f3a837b910a7b0f3b1cc0aac2c..9ff2ec50843dcb17b3c70316e248fbeea2c691b6 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -39,6 +39,7 @@ extensions = [
     "sphinx.ext.napoleon",
     "sphinx_rtd_theme",
     "sphinx.ext.mathjax",
+    "m2r2",
     "myst_nb",
 ]
 
diff --git a/docs/development/changelog.rst b/docs/development/changelog.rst
new file mode 100644
index 0000000000000000000000000000000000000000..0520a4c2c588e150479a29c607b66a566f1cac22
--- /dev/null
+++ b/docs/development/changelog.rst
@@ -0,0 +1 @@
+.. mdinclude:: ../../CHANGELOG.md
\ No newline at end of file
diff --git a/docs/development/contributing.rst b/docs/development/contributing.rst
new file mode 100644
index 0000000000000000000000000000000000000000..36431a6a4d4dd6ef6fe5567f5af31764a63316ed
--- /dev/null
+++ b/docs/development/contributing.rst
@@ -0,0 +1 @@
+.. mdinclude:: ../../CONTRIBUTING.md
\ No newline at end of file
diff --git a/docs/development/documentation.rst b/docs/development/documentation.rst
new file mode 100644
index 0000000000000000000000000000000000000000..e0b6bc22d957666ad9398e418b1fc39ce49354c1
--- /dev/null
+++ b/docs/development/documentation.rst
@@ -0,0 +1 @@
+.. mdinclude:: ../DOCS_README.md
\ No newline at end of file
diff --git a/docs/how_to/connector/modules.md b/docs/how_to/connector/modules.md
index e3d7ab2e96416d43e26017d2ffd8e858518231c6..f2f9c78f03f21eceac6a0d9e89212e6803829c45 100644
--- a/docs/how_to/connector/modules.md
+++ b/docs/how_to/connector/modules.md
@@ -1,30 +1,30 @@
 # Module Guides
 
+
 ```{toctree}
 ---
 maxdepth: 1
 ---
-
-../examples/data_connectors/PsychicDemo.ipynb
-../examples/data_connectors/DeepLakeReader.ipynb
-../examples/data_connectors/QdrantDemo.ipynb
-../examples/data_connectors/DiscordDemo.ipynb
-../examples/data_connectors/MongoDemo.ipynb
-../examples/data_connectors/ChromaDemo.ipynb
-../examples/data_connectors/MyScaleReaderDemo.ipynb
-../examples/data_connectors/FaissDemo.ipynb
-../examples/data_connectors/ObsidianReaderDemo.ipynb
-../examples/data_connectors/SlackDemo.ipynb
-../examples/data_connectors/WebPageDemo.ipynb
-../examples/data_connectors/PineconeDemo.ipynb
-../examples/data_connectors/MboxReaderDemo.ipynb
-../examples/data_connectors/MilvusReaderDemo.ipynb
-../examples/data_connectors/NotionDemo.ipynb
-../examples/data_connectors/GithubRepositoryReaderDemo.ipynb
-../examples/data_connectors/GoogleDocsDemo.ipynb
-../examples/data_connectors/DatabaseReaderDemo.ipynb
-../examples/data_connectors/TwitterDemo.ipynb
-../examples/data_connectors/WeaviateDemo.ipynb
-../examples/data_connectors/MakeDemo.ipynb
+../../examples/data_connectors/PsychicDemo.ipynb
+../../examples/data_connectors/DeepLakeReader.ipynb
+../../examples/data_connectors/QdrantDemo.ipynb
+../../examples/data_connectors/DiscordDemo.ipynb
+../../examples/data_connectors/MongoDemo.ipynb
+../../examples/data_connectors/ChromaDemo.ipynb
+../../examples/data_connectors/MyScaleReaderDemo.ipynb
+../../examples/data_connectors/FaissDemo.ipynb
+../../examples/data_connectors/ObsidianReaderDemo.ipynb
+../../examples/data_connectors/SlackDemo.ipynb
+../../examples/data_connectors/WebPageDemo.ipynb
+../../examples/data_connectors/PineconeDemo.ipynb
+../../examples/data_connectors/MboxReaderDemo.ipynb
+../../examples/data_connectors/MilvusReaderDemo.ipynb
+../../examples/data_connectors/NotionDemo.ipynb
+../../examples/data_connectors/GithubRepositoryReaderDemo.ipynb
+../../examples/data_connectors/GoogleDocsDemo.ipynb
+../../examples/data_connectors/DatabaseReaderDemo.ipynb
+../../examples/data_connectors/TwitterDemo.ipynb
+../../examples/data_connectors/WeaviateDemo.ipynb
+../../examples/data_connectors/MakeDemo.ipynb
 ```
 
diff --git a/docs/how_to/connector/root.md b/docs/how_to/connector/root.md
index 8775af6f2f666447c9926de73302f6502ff67e39..0f7db575bc6d52f56efa49a4294e73cdee4742c7 100644
--- a/docs/how_to/connector/root.md
+++ b/docs/how_to/connector/root.md
@@ -45,5 +45,5 @@ See below for detailed guides.
 ---
 maxdepth: 2
 ---
-modules.md
+modules.rst
 ```
\ No newline at end of file
diff --git a/docs/how_to/integrations/vector_stores.md b/docs/how_to/integrations/vector_stores.md
index d5ef26457975bc1a53d9f9f21a1a1fe372f92eac..dc95392c4100aa31b73213ae4d8ae504b1e3e541 100644
--- a/docs/how_to/integrations/vector_stores.md
+++ b/docs/how_to/integrations/vector_stores.md
@@ -326,7 +326,7 @@ index = VectorStoreIndex.from_documents(uber_docs, storage_context=storage_conte
 
 ## Loading Data from Vector Stores using Data Connector
 
-LlamaIndex supports oading data from the following sources. See [Data Connectors](/how_to/data_connectors.md) for more details and API documentation.
+LlamaIndex supports loading data from the following sources. See [Data Connectors](../connector/root.md) for more details and API documentation.
 
 Chroma stores both documents and vectors. This is an example of how to use Chroma:
 
diff --git a/docs/how_to/query_engine/advanced/query_transformations.md b/docs/how_to/query_engine/advanced/query_transformations.md
index 81690c7272dc690ae9ded832d7a6fd10e9938975..a09b8cf46bdb3053a43e3c37d376235d87ddde3c 100644
--- a/docs/how_to/query_engine/advanced/query_transformations.md
+++ b/docs/how_to/query_engine/advanced/query_transformations.md
@@ -43,7 +43,7 @@ print(response)
 
 ```
 
-Check out our [example notebook](../../examples/query_transformations/HyDEQueryTransformDemo.ipynb) for a full walkthrough.
+Check out our [example notebook](../../../examples/query_transformations/HyDEQueryTransformDemo.ipynb) for a full walkthrough.
 
 
 ### Single-Step Query Decomposition
@@ -55,7 +55,7 @@ If your query is complex, different parts of your knowledge base may answer diff
 
 Our single-step query decomposition feature transforms a **complicated** question into a simpler one over the data collection to help provide a sub-answer to the original question.
 
-This is especially helpful over a [composed graph](/how_to/index_structs/composability.md). Within a composed graph, a query can be routed to multiple subindexes, each representing a subset of the overall knowledge corpus. Query decomposition allows us to transform the query into a more suitable question over any given index.
+This is especially helpful over a [composed graph](../../index/composability.md). Within a composed graph, a query can be routed to multiple subindexes, each representing a subset of the overall knowledge corpus. Query decomposition allows us to transform the query into a more suitable question over any given index.
 
 An example image is shown below.
 
diff --git a/docs/how_to/storage/customization.md b/docs/how_to/storage/customization.md
index ab05c97820674ebad7c3198e2f34a875896f7ba2..c4d0092a6c8c04320d02f98cfb610854ec6f78b5 100644
--- a/docs/how_to/storage/customization.md
+++ b/docs/how_to/storage/customization.md
@@ -71,7 +71,7 @@ loaded_indicies = load_index_from_storage(storage_context, index_ids=["<index_id
 You can customize the underlying storage with a one-line change to instantiate different document stores, index stores, and vector stores.
 See the [Document Stores](/how_to/storage/docstores.md), [Vector Stores](/how_to/storage/vector_stores.md), and [Index Stores](/how_to/storage/index_stores.md) guides for more details.
 
-For saving and loading a graph/composable index, see the [full guide here](/how_to/index_structs/composability.md).
+For saving and loading a graph/composable index, see the [full guide here](../index/composability.md).
 
 ### Vector Store Integrations and Storage
 
diff --git a/docs/index.rst b/docs/index.rst
index cc3024e9f9150608f17d13618753bf4ce5437c35..8897529a154acc92e7a6cad543473fce35e1fb98 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -46,7 +46,6 @@ LlamaIndex provides tools for both beginner users and advanced users. Our high-l
 5 lines of code. Our lower-level APIs allow advanced users to customize and extend any module (data connectors, indices, retrievers, query engines, reranking modules),
 to fit their needs.
 
-
 .. toctree::
    :maxdepth: 1
    :caption: Getting Started
@@ -55,6 +54,14 @@ to fit their needs.
    getting_started/installation.md
    getting_started/starter_example.md
 
+.. toctree::
+   :maxdepth: 2
+   :caption: Development
+   :hidden:
+
+   development/contributing.rst
+   development/documentation.rst
+   development/changelog.rst
 
 .. toctree::
    :maxdepth: 2
diff --git a/docs/reference/query/query_engines/sub_question_query_engine.rst b/docs/reference/query/query_engines/sub_question_query_engine.rst
index ec7d63a024c41fe7a00f2ffe881fef2a983d1f39..26a18853c83e0ae232261bc0e773bfdd6f7a2fd3 100644
--- a/docs/reference/query/query_engines/sub_question_query_engine.rst
+++ b/docs/reference/query/query_engines/sub_question_query_engine.rst
@@ -1,5 +1,5 @@
 Sub Question Query Engine
-=======================
+==========================
 
 .. automodule:: llama_index.query_engine.sub_question_query_engine
    :members:
diff --git a/docs/requirements.txt b/docs/requirements.txt
index f733d7e76c30c72ca2c91ff66a19d66e7452515e..f493b71a19be798dd47a184cfb6013b40b918916 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -2,6 +2,7 @@
 sphinx>=4.3.0
 furo>=2023.3.27
 docutils<0.17
+m2r2
 myst-parser
 myst-nb
 sphinx-autobuild
diff --git a/docs/use_cases/agents.md b/docs/use_cases/agents.md
index fb00ba4354b9f3a85855fadef263e416c2a67ea0..715656474afbf788a9cfc1accea997a094dccdab 100644
--- a/docs/use_cases/agents.md
+++ b/docs/use_cases/agents.md
@@ -49,7 +49,7 @@ Some of these core modules are shown below along with example tutorials (not com
 - [LLM Reranking Guide (Great Gatsby)](/examples/node_postprocessor/LLMReranker-Gatsby.ipynb)
 
 **Chat Engines**
-- [Chat Engines How-To](/how_to/query/chat_engines.md)
+- [Chat Engines How-To](../how_to/chat_engine/root.md)
 
 
 ### Using LlamaIndex as a Tool within an Agent Framework
diff --git a/docs/use_cases/queries.md b/docs/use_cases/queries.md
index 3eb9e89ac1324a1db5043ceeb2785651fccbb63f..805c7cab8f977e41b53692aa4d2cbaf6e3609cbc 100644
--- a/docs/use_cases/queries.md
+++ b/docs/use_cases/queries.md
@@ -86,7 +86,7 @@ response = query_engine.query("<query_str>")
 ```
 
 **Guides**
-- [Composability](/how_to/index_structs/composability.md)
+- [Composability](../how_to/index/composability.md)
 - [City Analysis](../examples/composable_indices/city_analysis/PineconeDemo-CityAnalysis.ipynb) ([Notebook](https://github.com/jerryjliu/llama_index/blob/main/docs/examples/composable_indices/city_analysis/PineconeDemo-CityAnalysis.ipynb))
 
 
@@ -154,7 +154,7 @@ decompose_transform = DecomposeQueryTransform(
 This module will help break down a complex query into a simpler one over your existing index structure.
 
 **Guides**
-- [Query Transformations](/how_to/query/query_transformations.md)
+- [Query Transformations](../how_to/query_engine/advanced/query_transformations.md)
 - [City Analysis Compare/Contrast Example](../examples//composable_indices/city_analysis/City_Analysis-Decompose.ipynb) ([Notebook](https://github.com/jerryjliu/llama_index/blob/main/docs/examples/composable_indices/city_analysis/City_Analysis-Decompose.ipynb))
 
 You can also rely on the LLM to *infer* whether to perform compare/contrast queries (see Multi-Document Queries below).
@@ -214,7 +214,7 @@ the module will first decompose the query into a simpler initial question "What
 query the index, and then ask followup questions.
 
 **Guides**
-- [Query Transformations](/how_to/query/query_transformations.md)
+- [Query Transformations](../how_to/query_engine/advanced/query_transformations.md)
 - [Multi-Step Query Decomposition](../examples/query_transformations/HyDEQueryTransformDemo.ipynb) ([Notebook](https://github.com/jerryjliu/llama_index/blob/main/docs/examples/query_transformations/HyDEQueryTransformDemo.ipynb))
 
 
@@ -225,7 +225,7 @@ LlamaIndex can support queries that require an understanding of time. It can do
 - Sort by recency and filter outdated context.
 
 **Guides**
-- [Second-Stage Postprocessing Guide](/how_to/query/second_stage.md)
+- [Second-Stage Postprocessing Guide](../how_to/query_engine/advanced/second_stage.md)
 - [Prev/Next Postprocessing](../examples/node_postprocessor/PrevNextPostprocessorDemo.ipynb)
 - [Recency Postprocessing](../examples/node_postprocessor/RecencyPostprocessorDemo.ipynb)