diff --git a/.changeset/nervous-cars-own.md b/.changeset/nervous-cars-own.md
new file mode 100644
index 0000000000000000000000000000000000000000..f4e117f48bd7b75dc6c34fd67ae8c4ee95ee2218
--- /dev/null
+++ b/.changeset/nervous-cars-own.md
@@ -0,0 +1,5 @@
+---
+"@llamaindex/doc": patch
+---
+
+Added documentation for structured output in OpenAI and Ollama
diff --git a/apps/next/src/content/docs/llamaindex/modules/llms/ollama.mdx b/apps/next/src/content/docs/llamaindex/modules/llms/ollama.mdx
index ed3161f2446ddde0f7bf5b4a15e81f5e063d143a..4767d285b52edc91e6b6dea401b2feeb2638e338 100644
--- a/apps/next/src/content/docs/llamaindex/modules/llms/ollama.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/llms/ollama.mdx
@@ -55,6 +55,35 @@ const results = await queryEngine.query({
 });
 ```
 
+## Using JSON Response Format
+
+You can configure Ollama to return responses in JSON format:
+
+```ts
+import { Ollama } from "@llamaindex/ollama";
+import { z } from "zod";
+
+// Simple JSON format
+const llm = new Ollama({ 
+  model: "llama2", 
+  temperature: 0,
+  responseFormat: { type: "json_object" }
+});
+
+// Using Zod schema for validation
+const responseSchema = z.object({
+  summary: z.string(),
+  topics: z.array(z.string()),
+  sentiment: z.enum(["positive", "negative", "neutral"])
+});
+
+const structuredLLM = new Ollama({ 
+  model: "llama2", 
+  temperature: 0,
+  responseFormat: responseSchema  
+});
+```
+
 ## Full Example
 
 ```ts
diff --git a/apps/next/src/content/docs/llamaindex/modules/llms/openai.mdx b/apps/next/src/content/docs/llamaindex/modules/llms/openai.mdx
index a861a3501cbfe28cbf7ef331ca00941221853195..fb14423335dd7bd71696a56768b428925d4045ec 100644
--- a/apps/next/src/content/docs/llamaindex/modules/llms/openai.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/llms/openai.mdx
@@ -46,6 +46,33 @@ or
 Settings.llm = new OpenAI({ model: "gpt-3.5-turbo", temperature: 0, apiKey: <YOUR_API_KEY>, baseURL: "https://api.scaleway.ai/v1" });
 ```
 
+## Using JSON Response Format
+
+You can configure OpenAI to return responses in JSON format:
+
+```ts
+import { z } from "zod";
+
+Settings.llm = new OpenAI({ 
+  model: "gpt-4o", 
+  temperature: 0,
+  responseFormat: { type: "json_object" }  
+});
+
+// You can also use a Zod schema to validate the response structure
+const responseSchema = z.object({
+  summary: z.string(),  
+  topics: z.array(z.string()),
+  sentiment: z.enum(["positive", "negative", "neutral"])
+});
+
+Settings.llm = new OpenAI({ 
+  model: "gpt-4o", 
+  temperature: 0,
+  responseFormat: responseSchema  
+});
+```
+
 ## Load and index documents
 
 For this example, we will use a single document. In a real-world scenario, you would have multiple documents to index.