diff --git a/.changeset/tough-cups-doubt.md b/.changeset/tough-cups-doubt.md
new file mode 100644
index 0000000000000000000000000000000000000000..579b0f76241a44da072bd1f01963bd063166ba90
--- /dev/null
+++ b/.changeset/tough-cups-doubt.md
@@ -0,0 +1,5 @@
+---
+"llamaindex": patch
+---
+
+fix: use LLM metadata mode when generating the context for ContextChatEngine
diff --git a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts b/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
index 792c31f45e11b16ecf2e531a8ea1c061ae75c5af..ea1f043506c2810ef944380e9ad2336fbe3934ed 100644
--- a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
+++ b/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
@@ -4,7 +4,7 @@ import type {
   MessageContent,
   MessageType,
 } from "@llamaindex/core/llms";
-import { EngineResponse } from "@llamaindex/core/schema";
+import { EngineResponse, MetadataMode } from "@llamaindex/core/schema";
 import {
   extractText,
   streamConverter,
@@ -53,6 +53,8 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
       contextSystemPrompt: init?.contextSystemPrompt,
       nodePostprocessors: init?.nodePostprocessors,
       contextRole: init?.contextRole,
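+      // use the LLM-facing metadata mode so retrieved nodes' metadata is included in the generated context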
+      metadataMode: MetadataMode.LLM,
     });
     this.systemPrompt = init.systemPrompt;
   }
diff --git a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts b/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
index 39839080044fe1aeb169c90baa49a27f89a72209..2b5edba05bee5c1a9ca4dce0f34c58d85df7d7df 100644
--- a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
+++ b/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
@@ -1,5 +1,5 @@
 import type { MessageContent, MessageType } from "@llamaindex/core/llms";
-import { type NodeWithScore } from "@llamaindex/core/schema";
+import { MetadataMode, type NodeWithScore } from "@llamaindex/core/schema";
 import type { BaseNodePostprocessor } from "../../postprocessors/index.js";
 import type { ContextSystemPrompt } from "../../Prompt.js";
 import { defaultContextSystemPrompt } from "../../Prompt.js";
@@ -16,12 +16,14 @@ export class DefaultContextGenerator
   contextSystemPrompt: ContextSystemPrompt;
   nodePostprocessors: BaseNodePostprocessor[];
   contextRole: MessageType;
+  metadataMode: MetadataMode;
 
   constructor(init: {
     retriever: BaseRetriever;
     contextSystemPrompt?: ContextSystemPrompt;
     nodePostprocessors?: BaseNodePostprocessor[];
     contextRole?: MessageType;
+    metadataMode?: MetadataMode;
   }) {
     super();
 
@@ -30,6 +32,8 @@
       init?.contextSystemPrompt ?? defaultContextSystemPrompt;
     this.nodePostprocessors = init.nodePostprocessors || [];
     this.contextRole = init.contextRole ?? "system";
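+    // default to NONE so other users of this generator keep the previous, metadata-free context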
+    this.metadataMode = init.metadataMode ?? MetadataMode.NONE;
   }
 
   protected _getPrompts(): { contextSystemPrompt: ContextSystemPrompt } {
@@ -75,6 +79,9 @@
     const content = await createMessageContent(
       this.contextSystemPrompt,
       nodes.map((r) => r.node),
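+      // skip the optional extra params argument and forward the configured metadata mode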
+      undefined,
+      this.metadataMode,
     );
 
     return {
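
For reviewers, a minimal sketch of the behavior this patch changes. The node text and metadata below are made-up placeholders; `TextNode`, `MetadataMode`, and `getContent` are the exports the diff relies on, and the exact metadata formatting depends on the node's configured metadata keys.

    import { MetadataMode, TextNode } from "llamaindex";

    // Placeholder node standing in for a retrieved node.
    const node = new TextNode({
      text: "Tokyo is the capital of Japan.",
      metadata: { source: "geo.txt" },
    });

    // Before this patch, DefaultContextGenerator left createMessageContent at its
    // MetadataMode.NONE default, so only the raw text reached the context message.
    console.log(node.getContent(MetadataMode.NONE));

    // With MetadataMode.LLM, the LLM-facing metadata string is included ahead of
    // the text, so the model also sees e.g. the source entry.
    console.log(node.getContent(MetadataMode.LLM));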