diff --git a/apps/next/package.json b/apps/next/package.json
index 0253d9ddfc3f010b66e1cf0d2d589a4a98c3ea7c..8ecc944b11154cb54c7bcaa49d65a5afbde45145 100644
--- a/apps/next/package.json
+++ b/apps/next/package.json
@@ -8,8 +8,9 @@
     "build": "next build",
     "dev": "next dev",
     "start": "next start",
-    "postbuild": "tsx scripts/post-build.mts",
-    "build:docs": "cross-env NODE_OPTIONS=\"--max-old-space-size=8192\" typedoc && tsx scripts/generate-docs.mts"
+    "postbuild": "tsx scripts/post-build.mts && tsx scripts/validate-links.mts",
+    "build:docs": "cross-env NODE_OPTIONS=\"--max-old-space-size=8192\" typedoc && tsx scripts/generate-docs.mts",
+    "validate-links": "tsx scripts/validate-links.mts"
   },
   "dependencies": {
     "@icons-pack/react-simple-icons": "^10.1.0",
diff --git a/apps/next/scripts/validate-links.mts b/apps/next/scripts/validate-links.mts
new file mode 100644
index 0000000000000000000000000000000000000000..cafaee980fad5621bece6f9a6cdecc3cc452010e
--- /dev/null
+++ b/apps/next/scripts/validate-links.mts
@@ -0,0 +1,245 @@
+import glob from "fast-glob";
+import fs from "fs";
+import matter from "gray-matter";
+import path from "path";
+
+const CONTENT_DIR = path.join(process.cwd(), "src/content/docs");
+const BUILD_DIR = path.join(process.cwd(), ".next");
+
+// Regular expression to find internal links
+// This captures Markdown links [text](/docs/path) and href attributes href="/docs/path"
+const INTERNAL_LINK_REGEX = /(?:(?:\]\(|\bhref=["'])\/docs\/([^"')]+))/g;
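+// e.g. "[Other LLMs](/docs/llamaindex/modules/llms)" captures "llamaindex/modules/llms"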
+
+// Regular expression to find relative links
+// This captures relative links like [text](./path) or ![alt](../images/image.png)
+const RELATIVE_LINK_REGEX = /(?:\]\()(?:\s*)(?:\.\.?)\//g;
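+// Relative image links (e.g. "![alt](../images/image.png)") are filtered out later by isImageLink()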
+
+interface LinkValidationResult {
+  file: string;
+  invalidLinks: Array<{ link: string; line: number }>;
+}
+
+interface RelativeLinkResult {
+  file: string;
+  relativeLinks: Array<{ line: number; lineContent: string }>;
+}
+
+/**
+ * Get all valid documentation routes from the content directory
+ */
+async function getValidRoutes(): Promise<Set<string>> {
+  const mdxFiles = await glob("**/*.mdx", { cwd: CONTENT_DIR });
+
+  const routes = new Set<string>();
+
+  // Add each MDX file as a valid route
+  for (const file of mdxFiles) {
+    // Remove .mdx extension and normalize to route format
+    let route = file.replace(/\.mdx$/, "");
+
+    // Handle index files
+    if (route.endsWith("/index")) {
+      route = route.replace(/\/index$/, "");
+    } else if (route === "index") {
+      route = "";
+    }
+
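+    // e.g. "a/b.mdx" and "a/b/index.mdx" both map to the route "a/b"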
+    routes.add(route);
+  }
+
+  return routes;
+}
+
+/**
+ * Extract internal links from a MDX file
+ */
+function extractLinksFromFile(
+  filePath: string,
+): Array<{ link: string; line: number }> {
+  const content = fs.readFileSync(filePath, "utf-8");
+  const { content: mdxContent } = matter(content);
+
+  const lines = mdxContent.split("\n");
+  const links: Array<{ link: string; line: number }> = [];
+
+  lines.forEach((line, lineNumber) => {
+    let match;
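+    // exec() with the global flag steps through every /docs/ link on this line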
+    while ((match = INTERNAL_LINK_REGEX.exec(line)) !== null) {
+      if (match[1]) {
+        links.push({
+          link: match[1],
+          line: lineNumber + 1, // 1-based line numbers
+        });
+      }
+    }
+  });
+
+  return links;
+}
+
+/**
+ * Check if a line's relative link points to an image
+ */
+function isImageLink(line: string): boolean {
+  // Check for image extensions anywhere in the line
+  const imageExtensions = [".png", ".jpg", ".jpeg", ".gif", ".svg", ".webp"];
+  const hasImageExtension = imageExtensions.some((ext) =>
+    line.toLowerCase().includes(ext),
+  );
+
+  // Check for markdown image syntax: ![alt](./path)
+  const isMarkdownImage = line.includes("![");
+
+  return hasImageExtension || isMarkdownImage;
+}
+
+/**
+ * Extract relative links from a MDX file
+ */
+function findRelativeLinksInFile(
+  filePath: string,
+): Array<{ line: number; lineContent: string }> {
+  const content = fs.readFileSync(filePath, "utf-8");
+  const { content: mdxContent } = matter(content);
+
+  const lines = mdxContent.split("\n");
+  const relativeLinks: Array<{ line: number; lineContent: string }> = [];
+
+  lines.forEach((line, lineNumber) => {
+    // Check for relative links
+    if (RELATIVE_LINK_REGEX.test(line)) {
+      // Reset the regex lastIndex to start from the beginning of the line
+      RELATIVE_LINK_REGEX.lastIndex = 0;
+
+      // Skip image links
+      if (!isImageLink(line)) {
+        relativeLinks.push({
+          line: lineNumber + 1, // 1-based line numbers
+          lineContent: line.trim(),
+        });
+      }
+    }
+  });
+
+  return relativeLinks;
+}
+
+/**
+ * Find relative links in all MDX files
+ */
+async function findRelativeLinks(): Promise<RelativeLinkResult[]> {
+  const mdxFiles = await glob("**/*.mdx", { cwd: CONTENT_DIR });
+  const results: RelativeLinkResult[] = [];
+
+  for (const file of mdxFiles) {
+    const filePath = path.join(CONTENT_DIR, file);
+    const relativeLinks = findRelativeLinksInFile(filePath);
+
+    if (relativeLinks.length > 0) {
+      results.push({
+        file,
+        relativeLinks,
+      });
+    }
+  }
+
+  return results;
+}
+
+/**
+ * Validate internal links in all MDX files
+ */
+async function validateLinks(): Promise<LinkValidationResult[]> {
+  const mdxFiles = await glob("**/*.mdx", { cwd: CONTENT_DIR });
+  const validRoutes = await getValidRoutes();
+
+  const results: LinkValidationResult[] = [];
+
+  for (const file of mdxFiles) {
+    const filePath = path.join(CONTENT_DIR, file);
+    const links = extractLinksFromFile(filePath);
+
+    const invalidLinks = links.filter(({ link }) => {
+      // Check if the link exists in valid routes
+      // First normalize the link (remove any query string or hash)
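+      // e.g. "llamaindex/modules/data_stores#local-storage" -> "llamaindex/modules/data_stores"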
+      const normalizedLink = link.split("#")[0].split("?")[0];
+
+      // Remove llamaindex/ prefix if it exists as it's the root of the docs
+      let routePath = normalizedLink;
+      if (routePath.startsWith("llamaindex/")) {
+        routePath = routePath.substring("llamaindex/".length);
+      }
+
+      return !validRoutes.has(normalizedLink) && !validRoutes.has(routePath);
+    });
+
+    if (invalidLinks.length > 0) {
+      results.push({
+        file,
+        invalidLinks,
+      });
+    }
+  }
+
+  return results;
+}
+
+/**
+ * Main function to validate links and report errors
+ */
+async function main() {
+  console.log("šŸ” Validating links in documentation...");
+
+  try {
+    // Check for invalid internal links
+    const validationResults = await validateLinks();
+    // Check for relative links
+    const relativeLinksResults = await findRelativeLinks();
+
+    let hasErrors = false;
+
+    // Report invalid internal links
+    if (validationResults.length > 0) {
+      console.error("āŒ Found invalid internal links:");
+      hasErrors = true;
+
+      for (const result of validationResults) {
+        console.error(`\nFile: ${result.file}`);
+
+        for (const { link, line } of result.invalidLinks) {
+          console.error(`  - Line ${line}: /docs/${link}`);
+        }
+      }
+    }
+
+    // Report relative links
+    if (relativeLinksResults.length > 0) {
+      console.error("\nāŒ Found relative links (use absolute paths instead):");
+      hasErrors = true;
+
+      for (const result of relativeLinksResults) {
+        console.error(`\nFile: ${result.file}`);
+
+        for (const { line, lineContent } of result.relativeLinks) {
+          console.error(`  - Line ${line}: ${lineContent}`);
+        }
+      }
+    }
+
+    if (hasErrors) {
+      // Exit with error code to fail the build
+      process.exit(1);
+    } else {
+      console.log("āœ… All links are valid!");
+    }
+  } catch (error) {
+    console.error("Error validating links:", error);
+    process.exit(1);
+  }
+}
+
+main().catch((error) => {
+  console.error("Unhandled error:", error);
+  process.exit(1);
+});
diff --git a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/cloudflare.mdx b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/cloudflare.mdx
index 1d696ac0e28d954eeee66f35aa9790f8e2317999..27be85b09ea2bf3b7ffc41077a6bd2de36b0efbb 100644
--- a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/cloudflare.mdx
+++ b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/cloudflare.mdx
@@ -14,7 +14,7 @@ Before you start, make sure you have try LlamaIndex.TS in Node.js to make sure y
 
 <Card
   title="Getting Started with LlamaIndex.TS in Node.js"
-  href="/docs/llamaindex/getting_started/setup/node"
+  href="/docs/llamaindex/getting_started/frameworks/node"
 />
 
 Also, you need have the basic understanding of <a href='https://developers.cloudflare.com/workers/'><SiCloudflareworkers className="inline mr-2" color="#F38020" />Cloudflare Worker</a>.
diff --git a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/index.mdx b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/index.mdx
index db7836ab0da004c7d514d391f574ff2e3fd93d9c..533242e3c4287c746fd119e1eff5793fb5d7e4e4 100644
--- a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/index.mdx
+++ b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/index.mdx
@@ -15,28 +15,28 @@ import {
 		<>
 			<SiNodedotjs className="inline" color="#5FA04E" /> Node.js
 		</>
-	} href="/docs/llamaindex/getting_started/setup/node" />
+	} href="/docs/llamaindex/getting_started/frameworks/node" />
 	<Card title={
 		<>
 			<SiTypescript className="inline" color="#3178C6" /> TypeScript
 		</>
-	} href="/docs/llamaindex/getting_started/setup/typescript" />
+	} href="/docs/llamaindex/getting_started/frameworks/typescript" />
 	<Card title={
 		<>
 			<SiVite className='inline' color='#646CFF' /> Vite
 		</>
-	} href="/docs/llamaindex/getting_started/setup/vite" />
+	} href="/docs/llamaindex/getting_started/frameworks/vite" />
 	<Card
 		title={
 			<>
 				<SiNextdotjs className='inline' /> Next.js (React Server Component)
 			</>
 		}
-		href="/docs/llamaindex/getting_started/setup/next"
+		href="/docs/llamaindex/getting_started/frameworks/next"
 	/>
 	<Card title={
 		<>
 			<SiCloudflareworkers className='inline' color='#F38020' /> Cloudflare Workers
 		</>
-	} href="/docs/llamaindex/getting_started/setup/cloudflare" />
+	} href="/docs/llamaindex/getting_started/frameworks/cloudflare" />
 </Cards>
\ No newline at end of file
diff --git a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/next.mdx b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/next.mdx
index 7f7ab87ed5e8e948b278b99ce163719c26fc54c2..60c4e7bf5e0e7bf2c674f1d04f1a354ba21d69bd 100644
--- a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/next.mdx
+++ b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/next.mdx
@@ -7,7 +7,7 @@ Before you start, make sure you have try LlamaIndex.TS in Node.js to make sure y
 
 <Card
   title="Getting Started with LlamaIndex.TS in Node.js"
-  href="/docs/llamaindex/getting_started/setup/node"
+  href="/docs/llamaindex/getting_started/frameworks/node"
 />
 
 ## Differences between Node.js and Next.js
@@ -35,7 +35,7 @@ If you see any dependency issues, you are welcome to open an issue on the GitHub
 
 ## Edge Runtime
 
-[Vercel Edge Runtime](https://edge-runtime.vercel.app/) is a subset of Node.js APIs. Similar to [Cloudflare Workers](./cloudflare#difference-between-nodejs-and-cloudflare-worker),
+[Vercel Edge Runtime](https://edge-runtime.vercel.app/) is a subset of Node.js APIs. Similar to [Cloudflare Workers](/docs/llamaindex/getting_started/frameworks/cloudflare#difference-between-nodejs-and-cloudflare-worker),
 it is a serverless platform that runs your code on the edge.
 
 Not all features of Node.js are supported in Vercel Edge Runtime, so does LlamaIndex.TS, we are working on more compatibility with all JavaScript runtimes.
diff --git a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/node.mdx b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/node.mdx
index d9d56c2433abd8839fda1e42f523aa12f946cd4b..db1d696850d40343560c8499ff75554994b07d74 100644
--- a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/node.mdx
+++ b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/node.mdx
@@ -48,5 +48,5 @@ By the default, we are using `js-tiktoken` for tokenization. You can install `gp
 
 <Card
 	title="Getting Started with LlamaIndex.TS in TypeScript"
-	href="/docs/llamaindex/getting_started/setup/typescript"
+	href="/docs/llamaindex/getting_started/frameworks/typescript"
 />
diff --git a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/vite.mdx b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/vite.mdx
index c75c8fed932b48e5f1d7ba4333032bd661b460c1..388332d84b62dc1e81b39f862c4e2c6a3e6cb77c 100644
--- a/apps/next/src/content/docs/llamaindex/getting_started/frameworks/vite.mdx
+++ b/apps/next/src/content/docs/llamaindex/getting_started/frameworks/vite.mdx
@@ -7,7 +7,7 @@ Before you start, make sure you have try LlamaIndex.TS in Node.js to make sure y
 
 <Card
   title="Getting Started with LlamaIndex.TS in Node.js"
-  href="/docs/llamaindex/getting_started/setup/node"
+  href="/docs/llamaindex/getting_started/frameworks/node"
 />
 
 Also, make sure you have a basic understanding of [Vite](https://vitejs.dev/).
diff --git a/apps/next/src/content/docs/llamaindex/migration/0.8-to-0.9.mdx b/apps/next/src/content/docs/llamaindex/migration/0.8-to-0.9.mdx
index 09c42b4700bca276c30fdf9319fb99f9c5caf009..cf92af850bd9dbb904515e005cff5128be714a49 100644
--- a/apps/next/src/content/docs/llamaindex/migration/0.8-to-0.9.mdx
+++ b/apps/next/src/content/docs/llamaindex/migration/0.8-to-0.9.mdx
@@ -75,7 +75,7 @@ Now:
 import { SimpleDirectoryReader } from "@llamaindex/readers/directory";
 ```
 
-For more details about available data loaders and their usage, check the [Loading Data](/docs/llamaindex/guide/loading).
+For more details about available data loaders and their usage, check the [Loading Data](/docs/llamaindex/modules/loading).
 
 ### 4. Prefer using `llamaindex` instead of `@llamaindex/core`
 
diff --git a/apps/next/src/content/docs/llamaindex/migration/deprecated/agent/index.mdx b/apps/next/src/content/docs/llamaindex/migration/deprecated/agent/index.mdx
index f3c7d6122497fb0872d69b4dfd272c78ff6161a6..dc54264173eee5cc40b0d12f347701e3ff1aea10 100644
--- a/apps/next/src/content/docs/llamaindex/migration/deprecated/agent/index.mdx
+++ b/apps/next/src/content/docs/llamaindex/migration/deprecated/agent/index.mdx
@@ -21,11 +21,6 @@ LlamaIndex.TS comes with a few built-in agents, but you can also create your own
 - ReACT Agent
 - Meta3.1 504B via Bedrock (in `@llamaIndex/community`)
 
-## Examples
-
-- [OpenAI Agent](/docs/llamaindex/examples/agent)
-- [Gemini Agent](/docs/llamaindex/examples/agent_gemini)
-
 ## Api References
 
 - [OpenAIAgent](/docs/api/classes/OpenAIAgent)
diff --git a/apps/next/src/content/docs/llamaindex/modules/agent_workflow.mdx b/apps/next/src/content/docs/llamaindex/modules/agent_workflow.mdx
index 27b5d6495997f9107936db186283bbb31394feb2..00e699eba024b875e6c0acbb1cfd80c8ed07e2fc 100644
--- a/apps/next/src/content/docs/llamaindex/modules/agent_workflow.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/agent_workflow.mdx
@@ -3,7 +3,7 @@ title: Agent Workflows
 ---
 
 
-Agent Workflows are a powerful system that enables you to create and orchestrate one or multiple agents with tools to perform specific tasks. It's built on top of the base [`Workflow`](./workflows) system and provides a streamlined interface for agent interactions.
+Agent Workflows are a powerful system that enables you to create and orchestrate one or multiple agents with tools to perform specific tasks. It's built on top of the base [`Workflow`](/docs/llamaindex/modules/workflows) system and provides a streamlined interface for agent interactions.
 
 ## Usage
 
diff --git a/apps/next/src/content/docs/llamaindex/modules/chat/rsc.mdx b/apps/next/src/content/docs/llamaindex/modules/chat/rsc.mdx
index 2bb7ed0993c3f6cbffa686745947dd937f51b674..3883d1aeac06bd3b6e13fde5e14a4019f8c8e54e 100644
--- a/apps/next/src/content/docs/llamaindex/modules/chat/rsc.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/chat/rsc.mdx
@@ -6,7 +6,7 @@ import { ChatDemoRSC } from '../../../../../components/demo/chat/rsc/demo';
 
 Using [chat-ui](https://github.com/run-llama/chat-ui), it's easy to add a chat interface to your LlamaIndexTS application using [Next.js RSC](https://nextjs.org/docs/app/building-your-application/rendering/server-components) and [Vercel AI RSC](https://sdk.vercel.ai/docs/ai-sdk-rsc/overview).
 
-With RSC, the chat messages are not returned as JSON from the server (like when using an [API route](./chat)), instead the chat message components are rendered on the server side.
+With RSC, the chat messages are not returned as JSON from the server (like when using an [API route](/docs/llamaindex/modules/chat/chat)); instead, the chat message components are rendered on the server side.
 This is for example useful for rendering a whole chat history on the server before sending it to the client. [Check here](https://sdk.vercel.ai/docs/getting-started/navigating-the-library#when-to-use-ai-sdk-rsc), for a discussion of when to use use RSC.
 
 For implementing a chat interface with RSC, you need to create an AI action and then connect the chat interface to use it.
diff --git a/apps/next/src/content/docs/llamaindex/modules/data_stores/chat_stores/index.mdx b/apps/next/src/content/docs/llamaindex/modules/data_stores/chat_stores/index.mdx
index 7e15a4d7884ed5c649a4c41f48f5dfa1f4df7691..cadcc2ad4170a5b8efca631e072af6dcacb0951e 100644
--- a/apps/next/src/content/docs/llamaindex/modules/data_stores/chat_stores/index.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/data_stores/chat_stores/index.mdx
@@ -6,7 +6,7 @@ Chat stores manage chat history by storing sequences of messages in a structured
 
 ## Available Chat Stores
 
-- [SimpleChatStore](/docs/api/classes/SimpleChatStore): A simple in-memory chat store with support for [persisting](/docs/llamaindex/modules/data_stores/#local-storage) data to disk.
+- [SimpleChatStore](/docs/api/classes/SimpleChatStore): A simple in-memory chat store with support for [persisting](/docs/llamaindex/modules/data_stores#local-storage) data to disk.
 
 Check the [LlamaIndexTS Github](https://github.com/run-llama/LlamaIndexTS) for the most up to date overview of integrations.
 
diff --git a/apps/next/src/content/docs/llamaindex/modules/data_stores/doc_stores/index.mdx b/apps/next/src/content/docs/llamaindex/modules/data_stores/doc_stores/index.mdx
index 03b97172d158e27abf1e262ae09eb5541339f76b..db50ee11d4e5c6cf6d30e7bd652e915704130fa3 100644
--- a/apps/next/src/content/docs/llamaindex/modules/data_stores/doc_stores/index.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/data_stores/doc_stores/index.mdx
@@ -2,12 +2,12 @@
 title: Document Stores
 ---
 
-Document stores contain ingested document chunks, i.e. [Node](/docs/llamaindex/modules/documents_and_nodes/index)s.
+Document stores contain ingested document chunks, i.e. [Node](/docs/llamaindex/modules/documents_and_nodes)s.
 
 ## Available Document Stores
 
-- [SimpleDocumentStore](/docs/api/classes/SimpleDocumentStore): A simple in-memory document store with support for [persisting](/docs/llamaindex/modules/data_stores/#local-storage) data to disk.
-- [PostgresDocumentStore](/docs/api/classes/PostgresDocumentStore): A PostgreSQL document store, see [PostgreSQL Storage](/docs/llamaindex/modules/data_stores/#postgresql-storage).
+- [SimpleDocumentStore](/docs/api/classes/SimpleDocumentStore): A simple in-memory document store with support for [persisting](/docs/llamaindex/modules/data_stores#local-storage) data to disk.
+- [PostgresDocumentStore](/docs/api/classes/PostgresDocumentStore): A PostgreSQL document store, see [PostgreSQL Storage](/docs/llamaindex/modules/data_stores#postgresql-storage).
 
 Check the [LlamaIndexTS Github](https://github.com/run-llama/LlamaIndexTS) for the most up to date overview of integrations.
 
diff --git a/apps/next/src/content/docs/llamaindex/modules/data_stores/index_stores/index.mdx b/apps/next/src/content/docs/llamaindex/modules/data_stores/index_stores/index.mdx
index 34effb7fabc925409da3d6baddcb52ba8c52ed9e..69edf48a935c97a12e830a8dc838c637e25bb923 100644
--- a/apps/next/src/content/docs/llamaindex/modules/data_stores/index_stores/index.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/data_stores/index_stores/index.mdx
@@ -6,8 +6,8 @@ Index stores are underlying storage components that contain metadata(i.e. inform
 
 ## Available Index Stores
 
-- [SimpleIndexStore](/docs/api/classes/SimpleIndexStore): A simple in-memory index store with support for [persisting](/docs/llamaindex/modules/data_stores/#local-storage) data to disk.
-- [PostgresIndexStore](/docs/api/classes/PostgresIndexStore): A PostgreSQL index store, , see [PostgreSQL Storage](/docs/llamaindex/modules/data_stores/#postgresql-storage).
+- [SimpleIndexStore](/docs/api/classes/SimpleIndexStore): A simple in-memory index store with support for [persisting](/docs/llamaindex/modules/data_stores#local-storage) data to disk.
+- [PostgresIndexStore](/docs/api/classes/PostgresIndexStore): A PostgreSQL index store, see [PostgreSQL Storage](/docs/llamaindex/modules/data_stores#postgresql-storage).
 
 Check the [LlamaIndexTS Github](https://github.com/run-llama/LlamaIndexTS) for the most up to date overview of integrations.
 
diff --git a/apps/next/src/content/docs/llamaindex/modules/data_stores/kv_stores/index.mdx b/apps/next/src/content/docs/llamaindex/modules/data_stores/kv_stores/index.mdx
index 8ba76fe4810f4ecf44d33ee04191c4cf193b6bcb..7e367a360c596b495ed99516ad210ce7581dbdd5 100644
--- a/apps/next/src/content/docs/llamaindex/modules/data_stores/kv_stores/index.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/data_stores/kv_stores/index.mdx
@@ -2,12 +2,12 @@
 title: Key-Value Stores
 ---
 
-Key-Value Stores represent underlying storage components used in [Document Stores](/docs/llamaindex/modules/data_stores/doc_stores/index) and [Index Stores](/docs/llamaindex/modules/data_stores/index_stores/index)
+Key-Value Stores represent underlying storage components used in [Document Stores](/docs/llamaindex/modules/data_stores/doc_stores) and [Index Stores](/docs/llamaindex/modules/data_stores/index_stores)
 
 ## Available Key-Value Stores
 
-- [SimpleKVStore](/docs/api/classes/SimpleKVStore): A simple Key-Value store with support of [persisting](/docs/llamaindex/modules/data_stores/#local-storage) data to disk.
-- [PostgresKVStore](/docs/api/classes/PostgresKVStore): A PostgreSQL Key-Value store, see [PostgreSQL Storage](/docs/llamaindex/modules/data_stores/#postgresql-storage).
+- [SimpleKVStore](/docs/api/classes/SimpleKVStore): A simple Key-Value store with support for [persisting](/docs/llamaindex/modules/data_stores#local-storage) data to disk.
+- [PostgresKVStore](/docs/api/classes/PostgresKVStore): A PostgreSQL Key-Value store, see [PostgreSQL Storage](/docs/llamaindex/modules/data_stores#postgresql-storage).
 
 Check the [LlamaIndexTS Github](https://github.com/run-llama/LlamaIndexTS) for the most up to date overview of integrations.
 
diff --git a/apps/next/src/content/docs/llamaindex/modules/data_stores/vector_stores/index.mdx b/apps/next/src/content/docs/llamaindex/modules/data_stores/vector_stores/index.mdx
index f78ed4b04c9aa36b76e9745ff38edc0df96936e1..01b513c620308992057b266b01b9e84f10df8309 100644
--- a/apps/next/src/content/docs/llamaindex/modules/data_stores/vector_stores/index.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/data_stores/vector_stores/index.mdx
@@ -8,7 +8,7 @@ Vector stores save embedding vectors of your ingested document chunks.
 
 Available Vector Stores are shown on the sidebar to the left. Additionally the following integrations exist without separate documentation:
 
-- [SimpleVectorStore](/docs/api/classes/SimpleVectorStore): A simple in-memory vector store with optional [persistance](/docs/llamaindex/modules/data_stores/#local-storage) to disk.
+- [SimpleVectorStore](/docs/api/classes/SimpleVectorStore): A simple in-memory vector store with optional [persistence](/docs/llamaindex/modules/data_stores#local-storage) to disk.
 - [AstraDBVectorStore](/docs/api/classes/AstraDBVectorStore): A cloud-native, scalable Database-as-a-Service built on Apache Cassandra, see [datastax.com](https://www.datastax.com/products/datastax-astra)
 - [ChromaVectorStore](/docs/api/classes/ChromaVectorStore): An open-source vector database, focused on ease of use and performance, see [trychroma.com](https://www.trychroma.com/)
 - [MilvusVectorStore](/docs/api/classes/MilvusVectorStore): An open-source, high-performance, highly scalable vector database, see [milvus.io](https://milvus.io/)
@@ -19,6 +19,3 @@ Available Vector Stores are shown on the sidebar to the left. Additionally the f
 
 Check the [LlamaIndexTS Github](https://github.com/run-llama/LlamaIndexTS) for the most up to date overview of integrations.
 
-## API Reference
-
-- [BaseVectorStore](/docs/api/classes/BaseVectorStore)
diff --git a/apps/next/src/content/docs/llamaindex/modules/evaluation/modules/correctness.mdx b/apps/next/src/content/docs/llamaindex/modules/evaluation/correctness.mdx
similarity index 100%
rename from apps/next/src/content/docs/llamaindex/modules/evaluation/modules/correctness.mdx
rename to apps/next/src/content/docs/llamaindex/modules/evaluation/correctness.mdx
diff --git a/apps/next/src/content/docs/llamaindex/modules/evaluation/modules/faithfulness.mdx b/apps/next/src/content/docs/llamaindex/modules/evaluation/faithfulness.mdx
similarity index 100%
rename from apps/next/src/content/docs/llamaindex/modules/evaluation/modules/faithfulness.mdx
rename to apps/next/src/content/docs/llamaindex/modules/evaluation/faithfulness.mdx
diff --git a/apps/next/src/content/docs/llamaindex/modules/evaluation/modules/relevancy.mdx b/apps/next/src/content/docs/llamaindex/modules/evaluation/relevancy.mdx
similarity index 100%
rename from apps/next/src/content/docs/llamaindex/modules/evaluation/modules/relevancy.mdx
rename to apps/next/src/content/docs/llamaindex/modules/evaluation/relevancy.mdx
diff --git a/apps/next/src/content/docs/llamaindex/modules/ingestion_pipeline/transformations.mdx b/apps/next/src/content/docs/llamaindex/modules/ingestion_pipeline/transformations.mdx
index 70bd297c8ce253ecdc66e60da2aabff6b30b368a..7ddf59d246fe1700d9df1539456436252f78150c 100644
--- a/apps/next/src/content/docs/llamaindex/modules/ingestion_pipeline/transformations.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/ingestion_pipeline/transformations.mdx
@@ -8,7 +8,7 @@ Currently, the following components are Transformation objects:
 
 - [SentenceSplitter](/docs/api/classes/SentenceSplitter)
 - [MetadataExtractor](/docs/llamaindex/modules/documents_and_nodes/metadata_extraction)
-- [Embeddings](/docs/llamaindex/modules/embeddings/index)
+- [Embeddings](/docs/llamaindex/modules/embeddings)
 
 ## Usage Pattern
 
diff --git a/apps/next/src/content/docs/llamaindex/modules/loading/node-parser.mdx b/apps/next/src/content/docs/llamaindex/modules/loading/node-parser.mdx
index aa362c4213295001cb9821d27d254ac9ad4299f0..c5674d3a0aadbdd56b7f4ce090aff12cfe4c1407 100644
--- a/apps/next/src/content/docs/llamaindex/modules/loading/node-parser.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/loading/node-parser.mdx
@@ -5,7 +5,7 @@ description: Learn how to use Node Parsers and Text Splitters to extract data fr
 import { CodeNodeParserDemo } from '../../../../../components/demo/code-node-parser.tsx';
 import { Tab, Tabs } from "fumadocs-ui/components/tabs";
 
-Node parsers are a simple abstraction that take a list of documents, and chunk them into `Node` objects, such that each node is a specific chunk of the parent document. When a document is broken into nodes, all of it's attributes are inherited to the children nodes (i.e. `metadata`, text and metadata templates, etc.). You can read more about `Node` and `Document` properties [here](./).
+Node parsers are a simple abstraction that takes a list of documents and chunks them into `Node` objects, such that each node is a specific chunk of the parent document. When a document is broken into nodes, all of its attributes are inherited by the child nodes (i.e. `metadata`, text and metadata templates, etc.). You can read more about `Node` and `Document` properties [here](/docs/llamaindex/modules/loading).
 
 ## NodeParser
 
diff --git a/apps/next/src/content/docs/llamaindex/tutorials/basic_agent.mdx b/apps/next/src/content/docs/llamaindex/tutorials/basic_agent.mdx
index 85377bee5a928cd2cea04a7976d09c8d491a7af0..a547dc195af9acd8980563defd4cca4e62388c65 100644
--- a/apps/next/src/content/docs/llamaindex/tutorials/basic_agent.mdx
+++ b/apps/next/src/content/docs/llamaindex/tutorials/basic_agent.mdx
@@ -5,7 +5,7 @@ title: Basic Agent
 import { DynamicCodeBlock } from 'fumadocs-ui/components/dynamic-codeblock';
 import CodeSource from "!raw-loader!../../../../../../../examples/agent/openai";
 
-We have a comprehensive, step-by-step [guide to building agents in LlamaIndex.TS](./agents/1_setup) that we recommend to learn what agents are and how to build them for production. But building a basic agent is simple:
+We have a comprehensive, step-by-step [guide to building agents in LlamaIndex.TS](/docs/llamaindex/tutorials/agents/1_setup) that we recommend for learning what agents are and how to build them for production. But building a basic agent is simple:
 
 ## Set up
 
diff --git a/apps/next/src/content/docs/llamaindex/tutorials/meta.json b/apps/next/src/content/docs/llamaindex/tutorials/meta.json
index 6145b9e27f2e0927c1f19b0175d530004091747d..b9e304fb7ea365f98e5c11e772ba633757451726 100644
--- a/apps/next/src/content/docs/llamaindex/tutorials/meta.json
+++ b/apps/next/src/content/docs/llamaindex/tutorials/meta.json
@@ -1,8 +1,8 @@
 {
   "title": "Tutorials",
   "pages": [
-    "rag",
     "basic_agent",
+    "rag",
     "agents",
     "workflow",
     "local_llm",
diff --git a/apps/next/src/content/docs/llamaindex/tutorials/rag/concepts.mdx b/apps/next/src/content/docs/llamaindex/tutorials/rag/concepts.mdx
index 99cc8423e3b26b6475d133aecb907454f7c56768..856ccffd3d296a7ba239f5277d48f29a06903484 100644
--- a/apps/next/src/content/docs/llamaindex/tutorials/rag/concepts.mdx
+++ b/apps/next/src/content/docs/llamaindex/tutorials/rag/concepts.mdx
@@ -30,10 +30,10 @@ LlamaIndex.TS help you prepare the knowledge base with a suite of data connector
 
 ![](./_static/concepts/indexing.jpg)
 
-[**Data Loaders**](/docs/llamaindex/modules/data_loaders/index):
+[**Data Loaders**](/docs/llamaindex/modules/data_loaders):
 A data connector (i.e. `Reader`) ingest data from different data sources and data formats into a simple `Document` representation (text and simple metadata).
 
-[**Documents / Nodes**](/docs/llamaindex/modules/documents_and_nodes/index): A `Document` is a generic container around any data source - for instance, a PDF, an API output, or retrieved data from a database. A `Node` is the atomic unit of data in LlamaIndex and represents a "chunk" of a source `Document`. It's a rich representation that includes metadata and relationships (to other nodes) to enable accurate and expressive retrieval operations.
+[**Documents / Nodes**](/docs/llamaindex/modules/documents_and_nodes): A `Document` is a generic container around any data source - for instance, a PDF, an API output, or retrieved data from a database. A `Node` is the atomic unit of data in LlamaIndex and represents a "chunk" of a source `Document`. It's a rich representation that includes metadata and relationships (to other nodes) to enable accurate and expressive retrieval operations.
 
 [**Data Indexes**](/docs/llamaindex/modules/data_index):
 Once you've ingested your data, LlamaIndex helps you index data into a format that's easy to retrieve.
diff --git a/apps/next/src/content/docs/llamaindex/tutorials/rag/index.mdx b/apps/next/src/content/docs/llamaindex/tutorials/rag/index.mdx
index c253efaaf1652baef040e6d236c473f7608ab3b6..ee91263e1585ae47c82edc4c0056c31a2aad5c90 100644
--- a/apps/next/src/content/docs/llamaindex/tutorials/rag/index.mdx
+++ b/apps/next/src/content/docs/llamaindex/tutorials/rag/index.mdx
@@ -6,7 +6,7 @@ import { DynamicCodeBlock } from 'fumadocs-ui/components/dynamic-codeblock';
 import CodeSource from "!raw-loader!../../../../../../../../examples/vectorIndex";
 import TSConfigSource from "!!raw-loader!../../../../../../../../examples/tsconfig.json";
 
-One of the most common use-cases for LlamaIndex is Retrieval-Augmented Generation or RAG, in which your data is indexed and selectively retrieved to be given to an LLM as source material for responding to a query. You can learn more about the [concepts behind RAG](./rag/concepts).
+One of the most common use-cases for LlamaIndex is Retrieval-Augmented Generation or RAG, in which your data is indexed and selectively retrieved to be given to an LLM as source material for responding to a query. You can learn more about the [concepts behind RAG](/docs/llamaindex/tutorials/rag/concepts).
 
 ## Set up the project
 
@@ -17,9 +17,9 @@ npm init
 npm install -D typescript @types/node
 ```
 
-Then, check out the [installation](../setup) steps to install LlamaIndex.TS and prepare an OpenAI key.
+Then, check out the [installation](/docs/llamaindex/getting_started) steps to install LlamaIndex.TS and prepare an OpenAI key.
 
-You can use [other LLMs](/docs/llamaindex/modules/llms) via their APIs; if you would prefer to use local models check out our [local LLM example](./local_llm).
+You can use [other LLMs](/docs/llamaindex/modules/llms) via their APIs; if you would prefer to use local models check out our [local LLM example](/docs/llamaindex/tutorials/local_llm).
 
 ## Run queries
 
diff --git a/apps/next/src/content/docs/llamaindex/tutorials/structured_data_extraction.mdx b/apps/next/src/content/docs/llamaindex/tutorials/structured_data_extraction.mdx
index 696ca66e389f296cf70b90416791f2611b6da6a7..13c33ec25356b026001fe625070c7e1172707972 100644
--- a/apps/next/src/content/docs/llamaindex/tutorials/structured_data_extraction.mdx
+++ b/apps/next/src/content/docs/llamaindex/tutorials/structured_data_extraction.mdx
@@ -5,9 +5,9 @@ title: Structured data extraction
 import { DynamicCodeBlock } from 'fumadocs-ui/components/dynamic-codeblock';
 import CodeSource from "!raw-loader!../../../../../../../examples/jsonExtract";
 
-Make sure you have installed LlamaIndex.TS and have an OpenAI key. If you haven't, check out the [installation](../setup) guide.
+Make sure you have installed LlamaIndex.TS and have an OpenAI key. If you haven't, check out the [installation](/docs/llamaindex/getting_started) guide.
 
-You can use [other LLMs](/docs/llamaindex/modules/llms) via their APIs; if you would prefer to use local models check out our [local LLM example](./local_llm).
+You can use [other LLMs](/docs/llamaindex/modules/llms) via their APIs; if you would prefer to use local models check out our [local LLM example](/docs/llamaindex/tutorials/local_llm).
 
 ## Set up
 
diff --git a/examples/agent/openai.ts b/examples/agent/openai.ts
index e450452d294d3781b0ae28eeb972bf2617b96f50..77c9c430a66bcf69c0b55d6ee114eb61349fe426 100644
--- a/examples/agent/openai.ts
+++ b/examples/agent/openai.ts
@@ -1,39 +1,35 @@
-import { OpenAI } from "@llamaindex/openai";
-import { FunctionTool, agent } from "llamaindex";
+import { openai } from "@llamaindex/openai";
+import { agent, tool } from "llamaindex";
 import { z } from "zod";
 
-const sumNumbers = FunctionTool.from(
-  ({ a, b }: { a: number; b: number }) => `${a + b}`,
-  {
-    name: "sumNumbers",
-    description: "Use this function to sum two numbers",
-    parameters: z.object({
-      a: z.number().describe("The first number"),
-      b: z.number().describe("The second number"),
-    }),
-  },
-);
+const sumNumbers = tool({
+  name: "sumNumbers",
+  description: "Use this function to sum two numbers",
+  parameters: z.object({
+    a: z.number().describe("The first number"),
+    b: z.number().describe("The second number"),
+  }),
+  execute: ({ a, b }: { a: number; b: number }) => `${a + b}`,
+});
 
-const divideNumbers = FunctionTool.from(
-  ({ a, b }: { a: number; b: number }) => `${a / b}`,
-  {
-    name: "divideNumbers",
-    description: "Use this function to divide two numbers",
-    parameters: z.object({
-      a: z.number().describe("The dividend a to divide"),
-      b: z.number().describe("The divisor b to divide by"),
-    }),
-  },
-);
+const divideNumbers = tool({
+  name: "divideNumbers",
+  description: "Use this function to divide two numbers",
+  parameters: z.object({
+    a: z.number().describe("The dividend a to divide"),
+    b: z.number().describe("The divisor b to divide by"),
+  }),
+  execute: ({ a, b }: { a: number; b: number }) => `${a / b}`,
+});
 
 async function main() {
-  const workflow = agent({
+  const mathAgent = agent({
     tools: [sumNumbers, divideNumbers],
-    llm: new OpenAI({ model: "gpt-4o-mini" }),
+    llm: openai({ model: "gpt-4o-mini" }),
     verbose: false,
   });
 
-  const response = await workflow.run("How much is 5 + 5? then divide by 2");
+  const response = await mathAgent.run("How much is 5 + 5? then divide by 2");
   console.log(response.data);
 }