diff --git a/.changeset/ten-bottles-learn.md b/.changeset/ten-bottles-learn.md
new file mode 100644
index 0000000000000000000000000000000000000000..c89719c81e547b4cbf719786e112bcbddd206774
--- /dev/null
+++ b/.changeset/ten-bottles-learn.md
@@ -0,0 +1,13 @@
+---
+"@llamaindex/core": patch
+"@llamaindex/experimental": patch
+"llamaindex": patch
+---
+
+refactor: align `response-synthesizers` & `chat-engine` modules
+
+- built-in event system
+- correct class inheritance
+- align APIs and naming with llama-index (Python)
+- move `stream` from the first parameter to the second parameter for better type checking
+- remove `JSONQueryEngine` from `@llamaindex/experimental`; its code quality is not satisfactory and we will bring it back later
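+
+For example (based on the updated examples in this PR):
+
+```ts
+// before
+const stream = await queryEngine.query({ query: "...", stream: true });
+
+// after: `stream` is now the second parameter
+const stream = await queryEngine.query({ query: "..." }, true);
+```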
diff --git a/examples/huggingface/embedding.ts b/examples/huggingface/embedding.ts
index 8297b75366a47944bb56de0550e8df14b91694b0..01aca316a98ba26c6a2f4e173659db395195486c 100644
--- a/examples/huggingface/embedding.ts
+++ b/examples/huggingface/embedding.ts
@@ -27,10 +27,12 @@ async function main() {
 
   // Query the index
   const queryEngine = index.asQueryEngine();
-  const stream = await queryEngine.query({
-    query: "What did the author do in college?",
-    stream: true,
-  });
+  const stream = await queryEngine.query(
+    {
+      query: "What did the author do in college?",
+    },
+    true,
+  );
 
   // Output response
   for await (const chunk of stream) {
diff --git a/examples/huggingface/embeddingApi.ts b/examples/huggingface/embeddingApi.ts
index a89df27036ad8d2c05092b5e3f65c531f03703f5..a0bc861cb743653a6340472f442cc5bf2371fcfb 100644
--- a/examples/huggingface/embeddingApi.ts
+++ b/examples/huggingface/embeddingApi.ts
@@ -37,10 +37,12 @@ async function main() {
 
   // Query the index
   const queryEngine = index.asQueryEngine();
-  const stream = await queryEngine.query({
-    query: "What did the author do in college?",
-    stream: true,
-  });
+  const stream = await queryEngine.query(
+    {
+      query: "What did the author do in college?",
+    },
+    true,
+  );
 
   // Output response
   for await (const chunk of stream) {
diff --git a/examples/lowlevel.ts b/examples/lowlevel.ts
index 5f96692f3a3952cda8ad3c98755b7dcea7eb8f38..6922f5055ee7e7567a86c14429ea71a32514e215 100644
--- a/examples/lowlevel.ts
+++ b/examples/lowlevel.ts
@@ -1,7 +1,7 @@
 import {
   Document,
+  getResponseSynthesizer,
   NodeWithScore,
-  ResponseSynthesizer,
   SentenceSplitter,
   TextNode,
 } from "llamaindex";
@@ -14,7 +14,7 @@ import {
 
   console.log(nodes);
 
-  const responseSynthesizer = new ResponseSynthesizer();
+  const responseSynthesizer = getResponseSynthesizer("compact");
 
   const nodesWithScore: NodeWithScore[] = [
     {
@@ -30,7 +30,7 @@ import {
   const stream = await responseSynthesizer.synthesize(
     {
       query: "What age am I?",
-      nodesWithScore,
+      nodes: nodesWithScore,
     },
     true,
   );
diff --git a/examples/multimodal/rag.ts b/examples/multimodal/rag.ts
index 7d9a10c9eec7bf1d805954605525f9a182faa44b..8ac66ffa8cd6efe83cd287f5b7a263e097d7700a 100644
--- a/examples/multimodal/rag.ts
+++ b/examples/multimodal/rag.ts
@@ -1,5 +1,5 @@
 import {
-  MultiModalResponseSynthesizer,
+  getResponseSynthesizer,
   OpenAI,
   Settings,
   VectorStoreIndex,
@@ -27,13 +27,15 @@ async function main() {
   });
 
   const queryEngine = index.asQueryEngine({
-    responseSynthesizer: new MultiModalResponseSynthesizer(),
+    responseSynthesizer: getResponseSynthesizer("multi_modal"),
     retriever: index.asRetriever({ topK: { TEXT: 3, IMAGE: 1 } }),
   });
-  const stream = await queryEngine.query({
-    query: "Tell me more about Vincent van Gogh's famous paintings",
-    stream: true,
-  });
+  const stream = await queryEngine.query(
+    {
+      query: "Tell me more about Vincent van Gogh's famous paintings",
+    },
+    true,
+  );
   for await (const chunk of stream) {
     process.stdout.write(chunk.response);
   }
diff --git a/examples/prompts/promptMixin.ts b/examples/prompts/promptMixin.ts
index 074d2be77fd5b37a9546788a290cb289a95b4294..4833ddbec88778d04604decfc55ee5c7e7fe4b7b 100644
--- a/examples/prompts/promptMixin.ts
+++ b/examples/prompts/promptMixin.ts
@@ -1,8 +1,7 @@
 import {
   Document,
+  getResponseSynthesizer,
   PromptTemplate,
-  ResponseSynthesizer,
-  TreeSummarize,
   TreeSummarizePrompt,
   VectorStoreIndex,
 } from "llamaindex";
@@ -27,9 +26,7 @@ async function main() {
 
   const query = "The quick brown fox jumps over the lazy dog";
 
-  const responseSynthesizer = new ResponseSynthesizer({
-    responseBuilder: new TreeSummarize(),
-  });
+  const responseSynthesizer = getResponseSynthesizer("tree_summarize");
 
   const queryEngine = index.asQueryEngine({
     responseSynthesizer,
diff --git a/examples/readers/src/csv.ts b/examples/readers/src/csv.ts
index 17a6511ea79e11708a7b4ae608a116a60485dd5f..8764829ada49fd6db39dca2b38927b7c98e099c2 100644
--- a/examples/readers/src/csv.ts
+++ b/examples/readers/src/csv.ts
@@ -1,8 +1,7 @@
 import {
-  CompactAndRefine,
+  getResponseSynthesizer,
   OpenAI,
   PromptTemplate,
-  ResponseSynthesizer,
   Settings,
   VectorStoreIndex,
 } from "llamaindex";
@@ -29,8 +28,8 @@ Given the CSV file, generate me Typescript code to answer the question: {query}.
 `,
   });
 
-  const responseSynthesizer = new ResponseSynthesizer({
-    responseBuilder: new CompactAndRefine(undefined, csvPrompt),
+  const responseSynthesizer = getResponseSynthesizer("compact", {
+    textQATemplate: csvPrompt,
   });
 
   const queryEngine = index.asQueryEngine({ responseSynthesizer });
diff --git a/examples/readers/src/llamaparse-json.ts b/examples/readers/src/llamaparse-json.ts
index 1c8576769a75b17e787f45fd681cd2ba92f375fd..f87aa2a2e7c7423bf239b842d0f59d323240449a 100644
--- a/examples/readers/src/llamaparse-json.ts
+++ b/examples/readers/src/llamaparse-json.ts
@@ -1,3 +1,4 @@
+import { createMessageContent } from "@llamaindex/core/utils";
 import {
   Document,
   ImageNode,
@@ -6,7 +7,6 @@ import {
   PromptTemplate,
   VectorStoreIndex,
 } from "llamaindex";
-import { createMessageContent } from "llamaindex/synthesizers/utils";
 
 const reader = new LlamaParseReader();
 async function main() {
diff --git a/examples/vectorIndexAnthropic.ts b/examples/vectorIndexAnthropic.ts
index 5b10cdb4064a8a2f42237fa32151413a5f508b5f..6ebef441fc67579a48df89e78142a176926039b6 100644
--- a/examples/vectorIndexAnthropic.ts
+++ b/examples/vectorIndexAnthropic.ts
@@ -2,12 +2,10 @@ import fs from "node:fs/promises";
 
 import {
   Anthropic,
-  CompactAndRefine,
   Document,
-  ResponseSynthesizer,
   Settings,
   VectorStoreIndex,
-  anthropicTextQaPrompt,
+  getResponseSynthesizer,
 } from "llamaindex";
 
 // Update llm to use Anthropic
@@ -23,9 +21,7 @@ async function main() {
   const document = new Document({ text: essay, id_: path });
 
   // Split text and create embeddings. Store them in a VectorStoreIndex
-  const responseSynthesizer = new ResponseSynthesizer({
-    responseBuilder: new CompactAndRefine(undefined, anthropicTextQaPrompt),
-  });
+  const responseSynthesizer = getResponseSynthesizer("compact");
 
   const index = await VectorStoreIndex.fromDocuments([document]);
 
diff --git a/examples/vectorIndexFromVectorStore.ts b/examples/vectorIndexFromVectorStore.ts
index cf6c672a90fece3c5e0b70c98447661c5706faf5..042dab8551b7f1f636b88fdc96e72d78dcb05b5f 100644
--- a/examples/vectorIndexFromVectorStore.ts
+++ b/examples/vectorIndexFromVectorStore.ts
@@ -1,11 +1,10 @@
 import {
+  getResponseSynthesizer,
   OpenAI,
   OpenAIEmbedding,
-  ResponseSynthesizer,
   RetrieverQueryEngine,
   Settings,
   TextNode,
-  TreeSummarize,
   VectorIndexRetriever,
   VectorStore,
   VectorStoreIndex,
@@ -165,10 +164,7 @@ async function main() {
       similarityTopK: 500,
     });
 
-    const responseSynthesizer = new ResponseSynthesizer({
-      responseBuilder: new TreeSummarize(),
-    });
-
+    const responseSynthesizer = getResponseSynthesizer("tree_summarize");
     return new RetrieverQueryEngine(retriever, responseSynthesizer, {
       filter,
     });
diff --git a/packages/core/package.json b/packages/core/package.json
index 4d1512a2bbb2e0874b6679c0084822d44bfede95..b2fe70d6660571c603a7563eacdff254f9d05a74 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -185,6 +185,20 @@
         "types": "./dist/storage/chat-store/index.d.ts",
         "default": "./dist/storage/chat-store/index.js"
       }
+    },
+    "./response-synthesizers": {
+      "require": {
+        "types": "./dist/response-synthesizers/index.d.cts",
+        "default": "./dist/response-synthesizers/index.cjs"
+      },
+      "import": {
+        "types": "./dist/response-synthesizers/index.d.ts",
+        "default": "./dist/response-synthesizers/index.js"
+      },
+      "default": {
+        "types": "./dist/response-synthesizers/index.d.ts",
+        "default": "./dist/response-synthesizers/index.js"
+      }
     }
   },
   "files": [
@@ -210,6 +224,7 @@
   "dependencies": {
     "@llamaindex/env": "workspace:*",
     "@types/node": "^22.5.1",
+    "magic-bytes.js": "^1.10.0",
     "zod": "^3.23.8"
   }
 }
diff --git a/packages/core/src/global/settings/callback-manager.ts b/packages/core/src/global/settings/callback-manager.ts
index edaf9323713f6b00809bb7b4afd42d4fea9f13db..94bd303ae83fe5ef7a3db831e1f1153a85fac2de 100644
--- a/packages/core/src/global/settings/callback-manager.ts
+++ b/packages/core/src/global/settings/callback-manager.ts
@@ -6,8 +6,13 @@ import type {
   ToolCall,
   ToolOutput,
 } from "../../llms";
+import type { QueryEndEvent, QueryStartEvent } from "../../query-engine";
+import type {
+  SynthesizeEndEvent,
+  SynthesizeStartEvent,
+} from "../../response-synthesizers";
 import { TextNode } from "../../schema";
-import { EventCaller, getEventCaller } from "../../utils/event-caller";
+import { EventCaller, getEventCaller } from "../../utils";
 import type { UUID } from "../type";
 
 export type LLMStartEvent = {
@@ -60,6 +65,10 @@ export interface LlamaIndexEventMaps {
   "chunking-end": ChunkingEndEvent;
   "node-parsing-start": NodeParsingStartEvent;
   "node-parsing-end": NodeParsingEndEvent;
+  "query-start": QueryStartEvent;
+  "query-end": QueryEndEvent;
+  "synthesize-start": SynthesizeStartEvent;
+  "synthesize-end": SynthesizeEndEvent;
 }
 
 export class LlamaIndexCustomEvent<T = any> extends CustomEvent<T> {
diff --git a/packages/core/src/indices/prompt-helper.ts b/packages/core/src/indices/prompt-helper.ts
index 0ae144ae0ebe0a3b072063a0ecb6905867985310..a0e1dbc1b13ddc2a5ec79f50c1242fe5e1337d50 100644
--- a/packages/core/src/indices/prompt-helper.ts
+++ b/packages/core/src/indices/prompt-helper.ts
@@ -1,10 +1,13 @@
 import { type Tokenizer, tokenizers } from "@llamaindex/env";
 import {
   DEFAULT_CHUNK_OVERLAP_RATIO,
+  DEFAULT_CHUNK_SIZE,
   DEFAULT_CONTEXT_WINDOW,
   DEFAULT_NUM_OUTPUTS,
   DEFAULT_PADDING,
+  Settings,
 } from "../global";
+import type { LLMMetadata } from "../llms";
 import { SentenceSplitter } from "../node-parser";
 import type { PromptTemplate } from "../prompts";
 
@@ -133,4 +136,29 @@ export class PromptHelper {
     const combinedStr = textChunks.join("\n\n");
     return textSplitter.splitText(combinedStr);
   }
+
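+  /**
+   * Build a `PromptHelper` from an LLM's metadata, using its context window
+   * and max output tokens, with overridable defaults for the rest.
+   */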
+  static fromLLMMetadata(
+    metadata: LLMMetadata,
+    options?: {
+      chunkOverlapRatio?: number;
+      chunkSizeLimit?: number;
+      tokenizer?: Tokenizer;
+      separator?: string;
+    },
+  ) {
+    const {
+      chunkOverlapRatio = DEFAULT_CHUNK_OVERLAP_RATIO,
+      chunkSizeLimit = DEFAULT_CHUNK_SIZE,
+      tokenizer = Settings.tokenizer,
+      separator = " ",
+    } = options ?? {};
+    return new PromptHelper({
+      contextWindow: metadata.contextWindow,
+      numOutput: metadata.maxTokens ?? DEFAULT_NUM_OUTPUTS,
+      chunkOverlapRatio,
+      chunkSizeLimit,
+      tokenizer,
+      separator,
+    });
+  }
 }
diff --git a/packages/core/src/query-engine/base.ts b/packages/core/src/query-engine/base.ts
index 3871b8cb96ca09ab852467e2c9966ed51f5ba4ea..6b9d0b08e8bb1ecdfe4b4c7aef654e71b9f39800 100644
--- a/packages/core/src/query-engine/base.ts
+++ b/packages/core/src/query-engine/base.ts
@@ -1,5 +1,9 @@
+import { randomUUID } from "@llamaindex/env";
+import { Settings } from "../global";
 import type { MessageContent } from "../llms";
-import { EngineResponse, type NodeWithScore } from "../schema";
+import { PromptMixin } from "../prompts";
+import { EngineResponse } from "../schema";
+import { wrapEventCaller } from "../utils";
 
 /**
  * @link https://docs.llamaindex.ai/en/stable/api_reference/schema/?h=querybundle#llama_index.core.schema.QueryBundle
@@ -14,16 +18,37 @@ export type QueryBundle = {
 
 export type QueryType = string | QueryBundle;
 
-export interface BaseQueryEngine {
+export type QueryFn = (
+  strOrQueryBundle: QueryType,
+  stream?: boolean,
+) => Promise<AsyncIterable<EngineResponse> | EngineResponse>;
+
+export abstract class BaseQueryEngine extends PromptMixin {
+  protected constructor(protected readonly _query: QueryFn) {
+    super();
+  }
+
   query(
     strOrQueryBundle: QueryType,
     stream: true,
   ): Promise<AsyncIterable<EngineResponse>>;
   query(strOrQueryBundle: QueryType, stream?: false): Promise<EngineResponse>;
-
-  synthesize?(
+  @wrapEventCaller
+  async query(
     strOrQueryBundle: QueryType,
-    nodes: NodeWithScore[],
-    additionalSources?: Iterator<NodeWithScore>,
-  ): Promise<EngineResponse>;
+    stream = false,
+  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
+    const id = randomUUID();
+    const callbackManager = Settings.callbackManager;
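+    // wrap the concrete query function with query-start / query-end events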
+    callbackManager.dispatchEvent("query-start", {
+      id,
+      query: strOrQueryBundle,
+    });
+    const response = await this._query(strOrQueryBundle, stream);
+    callbackManager.dispatchEvent("query-end", {
+      id,
+      response,
+    });
+    return response;
+  }
 }
diff --git a/packages/core/src/query-engine/index.ts b/packages/core/src/query-engine/index.ts
index 5bcfd9a1068a3d8e2839c42b8daede08cf83ba50..c0a73e9bad9b4dec8baed84cc183598b9f762d00 100644
--- a/packages/core/src/query-engine/index.ts
+++ b/packages/core/src/query-engine/index.ts
@@ -1 +1,2 @@
-export type { BaseQueryEngine, QueryBundle, QueryType } from "./base";
+export { BaseQueryEngine, type QueryBundle, type QueryType } from "./base";
+export type { QueryEndEvent, QueryStartEvent } from "./type";
diff --git a/packages/core/src/query-engine/type.ts b/packages/core/src/query-engine/type.ts
new file mode 100644
index 0000000000000000000000000000000000000000..84e93b999d14751f1237881f86c85a8484a6b508
--- /dev/null
+++ b/packages/core/src/query-engine/type.ts
@@ -0,0 +1,12 @@
+import { EngineResponse } from "../schema";
+import type { QueryType } from "./base";
+
+export type QueryStartEvent = {
+  id: string;
+  query: QueryType;
+};
+
+export type QueryEndEvent = {
+  id: string;
+  response: EngineResponse | AsyncIterable<EngineResponse>;
+};
diff --git a/packages/core/src/response-synthesizers/base-synthesizer.ts b/packages/core/src/response-synthesizers/base-synthesizer.ts
new file mode 100644
index 0000000000000000000000000000000000000000..11c92d2dc794ece0ee6e5d94a0a3b173aaff39f2
--- /dev/null
+++ b/packages/core/src/response-synthesizers/base-synthesizer.ts
@@ -0,0 +1,58 @@
+import { randomUUID } from "@llamaindex/env";
+import { Settings } from "../global";
+import { PromptHelper } from "../indices";
+import type { LLM, MessageContent } from "../llms";
+import { PromptMixin } from "../prompts";
+import { EngineResponse, type NodeWithScore } from "../schema";
+import type { SynthesizeQuery } from "./type";
+
+export type BaseSynthesizerOptions = {
+  llm?: LLM;
+  promptHelper?: PromptHelper;
+};
+
+export abstract class BaseSynthesizer extends PromptMixin {
+  llm: LLM;
+  promptHelper: PromptHelper;
+
+  protected constructor(options: Partial<BaseSynthesizerOptions>) {
+    super();
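+    // default to the global Settings LLM and derive a PromptHelper from its metadata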
+    this.llm = options.llm ?? Settings.llm;
+    this.promptHelper =
+      options.promptHelper ?? PromptHelper.fromLLMMetadata(this.llm.metadata);
+  }
+
+  protected abstract getResponse(
+    query: MessageContent,
+    textChunks: NodeWithScore[],
+    stream: boolean,
+  ): Promise<EngineResponse | AsyncIterable<EngineResponse>>;
+
+  synthesize(
+    query: SynthesizeQuery,
+    stream: true,
+  ): Promise<AsyncIterable<EngineResponse>>;
+  synthesize(query: SynthesizeQuery, stream?: false): Promise<EngineResponse>;
+  async synthesize(
+    query: SynthesizeQuery,
+    stream = false,
+  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
+    const callbackManager = Settings.callbackManager;
+    const id = randomUUID();
+    callbackManager.dispatchEvent("synthesize-start", { id, query });
+    let response: EngineResponse | AsyncIterable<EngineResponse>;
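+    // short-circuit with an empty response when there are no nodes to synthesize from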
+    if (query.nodes.length === 0) {
+      if (stream) {
+        response = EngineResponse.fromResponse("Empty Response", true);
+      } else {
+        response = EngineResponse.fromResponse("Empty Response", false);
+      }
+    } else {
+      const queryMessage: MessageContent =
+        typeof query.query === "string" ? query.query : query.query.query;
+      response = await this.getResponse(queryMessage, query.nodes, stream);
+    }
+    callbackManager.dispatchEvent("synthesize-end", { id, query, response });
+    return response;
+  }
+}
diff --git a/packages/llamaindex/src/synthesizers/builders.ts b/packages/core/src/response-synthesizers/factory.ts
similarity index 55%
rename from packages/llamaindex/src/synthesizers/builders.ts
rename to packages/core/src/response-synthesizers/factory.ts
index 65c9ac5b2d623b831c25fc16a86654b919570754..868ed6cee029f20264aaf269f6f71b0af56c7801 100644
--- a/packages/llamaindex/src/synthesizers/builders.ts
+++ b/packages/core/src/response-synthesizers/factory.ts
@@ -1,108 +1,52 @@
-import { getBiggestPrompt, type PromptHelper } from "@llamaindex/core/indices";
-import type { LLM } from "@llamaindex/core/llms";
+import { z } from "zod";
+import { getBiggestPrompt } from "../indices";
+import type { MessageContent } from "../llms";
 import {
-  PromptMixin,
   defaultRefinePrompt,
   defaultTextQAPrompt,
   defaultTreeSummarizePrompt,
   type ModuleRecord,
-  type PromptsRecord,
   type RefinePrompt,
   type TextQAPrompt,
   type TreeSummarizePrompt,
-} from "@llamaindex/core/prompts";
-import type { QueryType } from "@llamaindex/core/query-engine";
-import { extractText, streamConverter } from "@llamaindex/core/utils";
-import type { ServiceContext } from "../ServiceContext.js";
+} from "../prompts";
 import {
-  llmFromSettingsOrContext,
-  promptHelperFromSettingsOrContext,
-} from "../Settings.js";
-import type { ResponseBuilder, ResponseBuilderQuery } from "./types.js";
-
-/**
- * Response modes of the response synthesizer
- */
-enum ResponseMode {
-  REFINE = "refine",
-  COMPACT = "compact",
-  TREE_SUMMARIZE = "tree_summarize",
-  SIMPLE = "simple",
-}
-
-/**
- * A response builder that just concatenates responses.
- */
-export class SimpleResponseBuilder
-  extends PromptMixin
-  implements ResponseBuilder
-{
-  llm: LLM;
-  textQATemplate: TextQAPrompt;
-
-  constructor(serviceContext?: ServiceContext, textQATemplate?: TextQAPrompt) {
-    super();
-    this.llm = llmFromSettingsOrContext(serviceContext);
-    this.textQATemplate = textQATemplate ?? defaultTextQAPrompt;
-  }
+  EngineResponse,
+  MetadataMode,
+  type NodeWithScore,
+  TextNode,
+} from "../schema";
+import { createMessageContent, extractText, streamConverter } from "../utils";
+import {
+  BaseSynthesizer,
+  type BaseSynthesizerOptions,
+} from "./base-synthesizer";
 
-  protected _getPrompts(): PromptsRecord {
-    return {
-      textQATemplate: this.textQATemplate,
-    };
-  }
-  protected _updatePrompts(prompts: { textQATemplate: TextQAPrompt }): void {
-    if (prompts.textQATemplate) {
-      this.textQATemplate = prompts.textQATemplate;
-    }
-  }
-  protected _getPromptModules(): ModuleRecord {
-    return {};
-  }
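+// response modes accepted by `getResponseSynthesizer` below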
+const responseModeSchema = z.enum([
+  "refine",
+  "compact",
+  "tree_summarize",
+  "multi_modal",
+]);
 
-  getResponse(
-    query: ResponseBuilderQuery,
-    stream: true,
-  ): Promise<AsyncIterable<string>>;
-  getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>;
-  async getResponse(
-    { query, textChunks }: ResponseBuilderQuery,
-    stream?: boolean,
-  ): Promise<AsyncIterable<string> | string> {
-    const prompt = this.textQATemplate.format({
-      query: extractText(query),
-      context: textChunks.join("\n\n"),
-    });
-    if (stream) {
-      const response = await this.llm.complete({ prompt, stream: true });
-      return streamConverter(response, (chunk) => chunk.text);
-    } else {
-      const response = await this.llm.complete({ prompt, stream: false });
-      return response.text;
-    }
-  }
-}
+export type ResponseMode = z.infer<typeof responseModeSchema>;
 
 /**
  * A response builder that uses the query to ask the LLM generate a better response using multiple text chunks.
  */
-export class Refine extends PromptMixin implements ResponseBuilder {
-  llm: LLM;
-  promptHelper: PromptHelper;
+class Refine extends BaseSynthesizer {
   textQATemplate: TextQAPrompt;
   refineTemplate: RefinePrompt;
 
   constructor(
-    serviceContext?: ServiceContext,
-    textQATemplate?: TextQAPrompt,
-    refineTemplate?: RefinePrompt,
+    options: BaseSynthesizerOptions & {
+      textQATemplate?: TextQAPrompt | undefined;
+      refineTemplate?: RefinePrompt | undefined;
+    },
   ) {
-    super();
-
-    this.llm = llmFromSettingsOrContext(serviceContext);
-    this.promptHelper = promptHelperFromSettingsOrContext(serviceContext);
-    this.textQATemplate = textQATemplate ?? defaultTextQAPrompt;
-    this.refineTemplate = refineTemplate ?? defaultRefinePrompt;
+    super(options);
+    this.textQATemplate = options.textQATemplate ?? defaultTextQAPrompt;
+    this.refineTemplate = options.refineTemplate ?? defaultRefinePrompt;
   }
 
   protected _getPromptModules(): ModuleRecord {
@@ -132,41 +76,47 @@ export class Refine extends PromptMixin implements ResponseBuilder {
     }
   }
 
-  getResponse(
-    query: ResponseBuilderQuery,
-    stream: true,
-  ): Promise<AsyncIterable<string>>;
-  getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>;
   async getResponse(
-    { query, textChunks, prevResponse }: ResponseBuilderQuery,
-    stream?: boolean,
-  ): Promise<AsyncIterable<string> | string> {
-    let response: AsyncIterable<string> | string | undefined = prevResponse;
+    query: MessageContent,
+    nodes: NodeWithScore[],
+    stream: boolean,
+  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
+    let response: AsyncIterable<string> | string | undefined = undefined;
+    const textChunks = nodes.map(({ node }) =>
+      node.getContent(MetadataMode.LLM),
+    );
 
     for (let i = 0; i < textChunks.length; i++) {
-      const chunk = textChunks[i]!;
+      const text = textChunks[i]!;
       const lastChunk = i === textChunks.length - 1;
       if (!response) {
         response = await this.giveResponseSingle(
           query,
-          chunk,
+          text,
           !!stream && lastChunk,
         );
       } else {
         response = await this.refineResponseSingle(
           response as string,
           query,
-          chunk,
+          text,
           !!stream && lastChunk,
         );
       }
     }
 
-    return response ?? "Empty Response";
+    // FIXME: no source nodes are provided here; cannot fix right now due to lack of context
+    if (typeof response === "string") {
+      return EngineResponse.fromResponse(response, false);
+    } else {
+      return streamConverter(response!, (text) =>
+        EngineResponse.fromResponse(text, true),
+      );
+    }
   }
 
   private async giveResponseSingle(
-    query: QueryType,
+    query: MessageContent,
     textChunk: string,
     stream: boolean,
   ): Promise<AsyncIterable<string> | string> {
@@ -203,10 +153,10 @@ export class Refine extends PromptMixin implements ResponseBuilder {
   // eslint-disable-next-line max-params
   private async refineResponseSingle(
     initialReponse: string,
-    query: QueryType,
+    query: MessageContent,
     textChunk: string,
     stream: boolean,
-  ) {
+  ): Promise<AsyncIterable<string> | string> {
     const refineTemplate: RefinePrompt = this.refineTemplate.partialFormat({
       query: extractText(query),
     });
@@ -246,59 +196,54 @@ export class Refine extends PromptMixin implements ResponseBuilder {
 /**
  * CompactAndRefine is a slight variation of Refine that first compacts the text chunks into the smallest possible number of chunks.
  */
-export class CompactAndRefine extends Refine {
-  getResponse(
-    query: ResponseBuilderQuery,
-    stream: true,
-  ): Promise<AsyncIterable<string>>;
-  getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>;
+class CompactAndRefine extends Refine {
   async getResponse(
-    { query, textChunks, prevResponse }: ResponseBuilderQuery,
-    stream?: boolean,
-  ): Promise<AsyncIterable<string> | string> {
+    query: MessageContent,
+    nodes: NodeWithScore[],
+    stream: boolean,
+  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
     const textQATemplate: TextQAPrompt = this.textQATemplate.partialFormat({
       query: extractText(query),
     });
     const refineTemplate: RefinePrompt = this.refineTemplate.partialFormat({
       query: extractText(query),
     });
+    const textChunks = nodes.map(({ node }) =>
+      node.getContent(MetadataMode.LLM),
+    );
 
     const maxPrompt = getBiggestPrompt([textQATemplate, refineTemplate]);
     const newTexts = this.promptHelper.repack(maxPrompt, textChunks);
-    const params = {
-      query,
-      textChunks: newTexts,
-      prevResponse,
-    };
+    const newNodes = newTexts.map((text) => new TextNode({ text }));
     if (stream) {
       return super.getResponse(
-        {
-          ...params,
-        },
+        query,
+        newNodes.map((node) => ({ node })),
         true,
       );
     }
-    return super.getResponse(params);
+    return super.getResponse(
+      query,
+      newNodes.map((node) => ({ node })),
+      false,
+    );
   }
 }
 
 /**
  * TreeSummarize repacks the text chunks into the smallest possible number of chunks and then summarizes them, then recursively does so until there's one chunk left.
  */
-export class TreeSummarize extends PromptMixin implements ResponseBuilder {
-  llm: LLM;
-  promptHelper: PromptHelper;
+class TreeSummarize extends BaseSynthesizer {
   summaryTemplate: TreeSummarizePrompt;
 
   constructor(
-    serviceContext?: ServiceContext,
-    summaryTemplate?: TreeSummarizePrompt,
+    options: BaseSynthesizerOptions & {
+      summaryTemplate?: TreeSummarizePrompt;
+    },
   ) {
-    super();
-
-    this.llm = llmFromSettingsOrContext(serviceContext);
-    this.promptHelper = promptHelperFromSettingsOrContext(serviceContext);
-    this.summaryTemplate = summaryTemplate ?? defaultTreeSummarizePrompt;
+    super(options);
+    this.summaryTemplate =
+      options.summaryTemplate ?? defaultTreeSummarizePrompt;
   }
 
   protected _getPromptModules(): ModuleRecord {
@@ -319,15 +264,14 @@ export class TreeSummarize extends PromptMixin implements ResponseBuilder {
     }
   }
 
-  getResponse(
-    query: ResponseBuilderQuery,
-    stream: true,
-  ): Promise<AsyncIterable<string>>;
-  getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>;
   async getResponse(
-    { query, textChunks }: ResponseBuilderQuery,
-    stream?: boolean,
-  ): Promise<AsyncIterable<string> | string> {
+    query: MessageContent,
+    nodes: NodeWithScore[],
+    stream: boolean,
+  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
+    const textChunks = nodes.map(({ node }) =>
+      node.getContent(MetadataMode.LLM),
+    );
     if (!textChunks || textChunks.length === 0) {
       throw new Error("Must have at least one text chunk");
     }
@@ -347,9 +291,14 @@ export class TreeSummarize extends PromptMixin implements ResponseBuilder {
       };
       if (stream) {
         const response = await this.llm.complete({ ...params, stream });
-        return streamConverter(response, (chunk) => chunk.text);
+        return streamConverter(response, (chunk) =>
+          EngineResponse.fromResponse(chunk.text, true),
+        );
       }
-      return (await this.llm.complete(params)).text;
+      return EngineResponse.fromResponse(
+        (await this.llm.complete(params)).text,
+        false,
+      );
     } else {
       const summaries = await Promise.all(
         packedTextChunks.map((chunk) =>
@@ -362,40 +311,118 @@ export class TreeSummarize extends PromptMixin implements ResponseBuilder {
         ),
       );
 
-      const params = {
-        query,
-        textChunks: summaries.map((s) => s.text),
-      };
       if (stream) {
         return this.getResponse(
-          {
-            ...params,
-          },
+          query,
+          summaries.map((s) => ({
+            node: new TextNode({
+              text: s.text,
+            }),
+          })),
           true,
         );
       }
-      return this.getResponse(params);
+      return this.getResponse(
+        query,
+        summaries.map((s) => ({
+          node: new TextNode({
+            text: s.text,
+          }),
+        })),
+        false,
+      );
     }
   }
 }
 
-export function getResponseBuilder(
-  serviceContext?: ServiceContext,
-  responseMode?: ResponseMode,
-): ResponseBuilder {
-  switch (responseMode) {
-    case ResponseMode.SIMPLE:
-      return new SimpleResponseBuilder(serviceContext);
-    case ResponseMode.REFINE:
-      return new Refine(serviceContext);
-    case ResponseMode.TREE_SUMMARIZE:
-      return new TreeSummarize(serviceContext);
-    default:
-      return new CompactAndRefine(serviceContext);
+class MultiModal extends BaseSynthesizer {
+  metadataMode: MetadataMode;
+  textQATemplate: TextQAPrompt;
+
+  constructor({
+    textQATemplate,
+    metadataMode,
+    ...options
+  }: BaseSynthesizerOptions & {
+    textQATemplate?: TextQAPrompt;
+    metadataMode?: MetadataMode;
+  } = {}) {
+    super(options);
+
+    this.metadataMode = metadataMode ?? MetadataMode.NONE;
+    this.textQATemplate = textQATemplate ?? defaultTextQAPrompt;
+  }
+
+  protected _getPromptModules(): ModuleRecord {
+    return {};
+  }
+
+  protected _getPrompts(): { textQATemplate: TextQAPrompt } {
+    return {
+      textQATemplate: this.textQATemplate,
+    };
+  }
+
+  protected _updatePrompts(promptsDict: {
+    textQATemplate: TextQAPrompt;
+  }): void {
+    if (promptsDict.textQATemplate) {
+      this.textQATemplate = promptsDict.textQATemplate;
+    }
+  }
+
+  protected async getResponse(
+    query: MessageContent,
+    nodes: NodeWithScore[],
+    stream: boolean,
+  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
+    const prompt = await createMessageContent(
+      this.textQATemplate,
+      nodes.map(({ node }) => node),
+      // this might not be good, as it removes the image information
+      { query: extractText(query) },
+      this.metadataMode,
+    );
+
+    const llm = this.llm;
+
+    if (stream) {
+      const response = await llm.complete({
+        prompt,
+        stream,
+      });
+      return streamConverter(response, ({ text }) =>
+        EngineResponse.fromResponse(text, true),
+      );
+    }
+    const response = await llm.complete({
+      prompt,
+    });
+    return EngineResponse.fromResponse(response.text, false);
   }
 }
 
-export type ResponseBuilderPrompts =
-  | TextQAPrompt
-  | TreeSummarizePrompt
-  | RefinePrompt;
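+/**
+ * Get a response synthesizer for the given response mode
+ * (`refine`, `compact`, `tree_summarize`, or `multi_modal`).
+ */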
+export function getResponseSynthesizer(
+  mode: ResponseMode,
+  options: BaseSynthesizerOptions & {
+    textQATemplate?: TextQAPrompt;
+    refineTemplate?: RefinePrompt;
+    summaryTemplate?: TreeSummarizePrompt;
+    metadataMode?: MetadataMode;
+  } = {},
+) {
+  switch (mode) {
+    case "compact": {
+      return new CompactAndRefine(options);
+    }
+    case "refine": {
+      return new Refine(options);
+    }
+    case "tree_summarize": {
+      return new TreeSummarize(options);
+    }
+    case "multi_modal": {
+      return new MultiModal(options);
+    }
+  }
+}
diff --git a/packages/core/src/response-synthesizers/index.ts b/packages/core/src/response-synthesizers/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2e471f99ba84f0710c07f8ed472d6e10e9593f53
--- /dev/null
+++ b/packages/core/src/response-synthesizers/index.ts
@@ -0,0 +1,10 @@
+export {
+  BaseSynthesizer,
+  type BaseSynthesizerOptions,
+} from "./base-synthesizer";
+export { getResponseSynthesizer, type ResponseMode } from "./factory";
+export type {
+  SynthesizeEndEvent,
+  SynthesizeQuery,
+  SynthesizeStartEvent,
+} from "./type";
diff --git a/packages/core/src/response-synthesizers/type.ts b/packages/core/src/response-synthesizers/type.ts
new file mode 100644
index 0000000000000000000000000000000000000000..0cdd1045e45432b0bc411ec7566881951a26f958
--- /dev/null
+++ b/packages/core/src/response-synthesizers/type.ts
@@ -0,0 +1,19 @@
+import type { QueryType } from "../query-engine";
+import { EngineResponse, type NodeWithScore } from "../schema";
+
+export type SynthesizeQuery = {
+  query: QueryType;
+  nodes: NodeWithScore[];
+  additionalSourceNodes?: NodeWithScore[];
+};
+
+export type SynthesizeStartEvent = {
+  id: string;
+  query: SynthesizeQuery;
+};
+
+export type SynthesizeEndEvent = {
+  id: string;
+  query: SynthesizeQuery;
+  response: EngineResponse | AsyncIterable<EngineResponse>;
+};
diff --git a/packages/core/src/utils/index.ts b/packages/core/src/utils/index.ts
index 6c397467e47b759c4c08263d39b80f60b2acc23e..d589702dd564f57119e4850b1f93ac44b2a6aaa6 100644
--- a/packages/core/src/utils/index.ts
+++ b/packages/core/src/utils/index.ts
@@ -1,4 +1,4 @@
-export { wrapEventCaller } from "./event-caller";
+export { EventCaller, getEventCaller, wrapEventCaller } from "./event-caller";
 
 export async function* streamConverter<S, D>(
   stream: AsyncIterable<S>,
@@ -47,10 +47,12 @@ export async function* streamReducer<S, D>(params: {
 export { wrapLLMEvent } from "./wrap-llm-event";
 
 export {
+  createMessageContent,
   extractDataUrlComponents,
   extractImage,
   extractSingleText,
   extractText,
+  imageToDataUrl,
   messagesToHistory,
   toToolDescriptions,
 } from "./llms";
diff --git a/packages/core/src/utils/llms.ts b/packages/core/src/utils/llms.ts
index 1620477e1b36e33d4a9d1d478a8c80d4eb2c8f73..b3d9825390bcc011d5e58cc17f006fc6d65bc5d2 100644
--- a/packages/core/src/utils/llms.ts
+++ b/packages/core/src/utils/llms.ts
@@ -1,3 +1,5 @@
+import { fs } from "@llamaindex/env";
+import { filetypemime } from "magic-bytes.js";
 import type {
   ChatMessage,
   MessageContent,
@@ -5,8 +7,16 @@ import type {
   MessageContentTextDetail,
   ToolMetadata,
 } from "../llms";
+import type { BasePromptTemplate } from "../prompts";
 import type { QueryType } from "../query-engine";
 import type { ImageType } from "../schema";
+import {
+  type BaseNode,
+  ImageNode,
+  MetadataMode,
+  ModalityType,
+  splitNodesByType,
+} from "../schema";
 
 /**
  * Extracts just the text whether from
@@ -107,3 +117,99 @@ export function toToolDescriptions(tools: ToolMetadata[]): string {
 
   return JSON.stringify(toolsObj, null, 4);
 }
+
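+/** Detect the MIME type from the blob's magic bytes and encode it as a base64 data URL. */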
+async function blobToDataUrl(input: Blob) {
+  const buffer = Buffer.from(await input.arrayBuffer());
+  const mimes = filetypemime(buffer);
+  if (mimes.length < 1) {
+    throw new Error("Unsupported image type");
+  }
+  return "data:" + mimes[0] + ";base64," + buffer.toString("base64");
+}
+
+export async function imageToDataUrl(
+  input: ImageType | Uint8Array,
+): Promise<string> {
+  // first, ensure that the input is a Blob
+  if (
+    (input instanceof URL && input.protocol === "file:") ||
+    typeof input === "string"
+  ) {
+    // string or file URL
+    const dataBuffer = await fs.readFile(
+      input instanceof URL ? input.pathname : input,
+    );
+    input = new Blob([dataBuffer]);
+  } else if (!(input instanceof Blob)) {
+    if (input instanceof URL) {
+      throw new Error(`Unsupported URL with protocol: ${input.protocol}`);
+    } else if (input instanceof Uint8Array) {
+      input = new Blob([input]); // convert Uint8Array to Blob
+    } else {
+      throw new Error(`Unsupported input type: ${typeof input}`);
+    }
+  }
+  return await blobToDataUrl(input);
+}
+
+// eslint-disable-next-line max-params
+async function createContentPerModality(
+  prompt: BasePromptTemplate,
+  type: ModalityType,
+  nodes: BaseNode[],
+  extraParams: Record<string, string>,
+  metadataMode: MetadataMode,
+): Promise<MessageContentDetail[]> {
+  switch (type) {
+    case ModalityType.TEXT:
+      return [
+        {
+          type: "text",
+          text: prompt.format({
+            ...extraParams,
+            context: nodes.map((r) => r.getContent(metadataMode)).join("\n\n"),
+          }),
+        },
+      ];
+    case ModalityType.IMAGE:
+      const images: MessageContentDetail[] = await Promise.all(
+        (nodes as ImageNode[]).map(async (node) => {
+          return {
+            type: "image_url",
+            image_url: {
+              url: await imageToDataUrl(node.image),
+            },
+          } satisfies MessageContentDetail;
+        }),
+      );
+      return images;
+    default:
+      return [];
+  }
+}
+
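+/**
+ * Create message content from a prompt and a list of nodes, grouped by
+ * modality: text nodes are formatted into the prompt's `context`, image
+ * nodes become `image_url` parts.
+ */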
+export async function createMessageContent(
+  prompt: BasePromptTemplate,
+  nodes: BaseNode[],
+  extraParams: Record<string, string> = {},
+  metadataMode: MetadataMode = MetadataMode.NONE,
+): Promise<MessageContentDetail[]> {
+  const content: MessageContentDetail[] = [];
+  const nodeMap = splitNodesByType(nodes);
+  for (const type in nodeMap) {
+    // for each retrieved modality type, create message content
+    const nodes = nodeMap[type as ModalityType];
+    if (nodes) {
+      content.push(
+        ...(await createContentPerModality(
+          prompt,
+          type as ModalityType,
+          nodes,
+          extraParams,
+          metadataMode,
+        )),
+      );
+    }
+  }
+  return content;
+}
diff --git a/packages/experimental/src/engines/query/JSONQueryEngine.ts b/packages/experimental/src/engines/query/JSONQueryEngine.ts
deleted file mode 100644
index f6a73cd8ea3623b9c16a8d7cfa3bfe7e9032d68f..0000000000000000000000000000000000000000
--- a/packages/experimental/src/engines/query/JSONQueryEngine.ts
+++ /dev/null
@@ -1,211 +0,0 @@
-import jsonpath from "jsonpath";
-
-import { EngineResponse } from "llamaindex";
-
-import { serviceContextFromDefaults, type ServiceContext } from "llamaindex";
-
-import type {
-  QueryEngine,
-  QueryEngineParamsNonStreaming,
-  QueryEngineParamsStreaming,
-} from "llamaindex";
-
-import {
-  defaultJsonPathPrompt,
-  defaultResponseSynthesizePrompt,
-  type JSONPathPrompt,
-  type ResponseSynthesisPrompt,
-} from "./prompt.js";
-
-export type JSONSchemaType = Record<string, unknown>;
-
-function removeExtraQuotes(expr: string) {
-  let startIndex = 0;
-  let endIndex = expr.length;
-
-  // Trim the leading backticks and single quotes
-  while (
-    startIndex < endIndex &&
-    (expr[startIndex] === "`" || expr[startIndex] === "'")
-  ) {
-    startIndex++;
-  }
-
-  // Trim the trailing backticks and single quotes
-  while (
-    endIndex > startIndex &&
-    (expr[endIndex - 1] === "`" || expr[endIndex - 1] === "'")
-  ) {
-    endIndex--;
-  }
-
-  // Return the trimmed substring
-  return expr.substring(startIndex, endIndex);
-}
-
-export const defaultOutputProcessor = async ({
-  llmOutput,
-  jsonValue,
-}: {
-  llmOutput: string;
-  jsonValue: JSONSchemaType;
-}): Promise<Record<string, unknown>[]> => {
-  const expressions = llmOutput
-    .split(",")
-    .map((expr) => removeExtraQuotes(expr.trim()));
-
-  const results: Record<string, unknown>[] = [];
-
-  for (const expression of expressions) {
-    // get the key for example content from $.content
-    const key = expression.split(".").pop();
-
-    try {
-      const datums = jsonpath.query(jsonValue, expression);
-
-      if (!key) throw new Error(`Invalid JSON Path: ${expression}`);
-
-      for (const datum of datums) {
-        // in case there is a filter like [?(@.username=='simon')] without a key ie: $..comments[?(@.username=='simon').content]
-        if (key.includes("==")) {
-          results.push(datum);
-          continue;
-        }
-
-        results.push({
-          [key]: datum,
-        });
-      }
-    } catch (err) {
-      throw new Error(`Invalid JSON Path: ${expression}`);
-    }
-  }
-
-  return results;
-};
-
-type OutputProcessor = typeof defaultOutputProcessor;
-
-/**
- * A JSON query engine that uses JSONPath to query a JSON object.
- */
-export class JSONQueryEngine implements QueryEngine {
-  jsonValue: JSONSchemaType;
-  jsonSchema: JSONSchemaType;
-  serviceContext: ServiceContext;
-  outputProcessor: OutputProcessor;
-  verbose: boolean;
-  jsonPathPrompt: JSONPathPrompt;
-  synthesizeResponse: boolean;
-  responseSynthesisPrompt: ResponseSynthesisPrompt;
-
-  constructor(init: {
-    jsonValue: JSONSchemaType;
-    jsonSchema: JSONSchemaType;
-    serviceContext?: ServiceContext;
-    jsonPathPrompt?: JSONPathPrompt;
-    outputProcessor?: OutputProcessor;
-    synthesizeResponse?: boolean;
-    responseSynthesisPrompt?: ResponseSynthesisPrompt;
-    verbose?: boolean;
-  }) {
-    this.jsonValue = init.jsonValue;
-    this.jsonSchema = init.jsonSchema;
-    this.serviceContext = init.serviceContext ?? serviceContextFromDefaults({});
-    this.jsonPathPrompt = init.jsonPathPrompt ?? defaultJsonPathPrompt;
-    this.outputProcessor = init.outputProcessor ?? defaultOutputProcessor;
-    this.verbose = init.verbose ?? false;
-    this.synthesizeResponse = init.synthesizeResponse ?? true;
-    this.responseSynthesisPrompt =
-      init.responseSynthesisPrompt ?? defaultResponseSynthesizePrompt;
-  }
-
-  getPrompts(): Record<string, unknown> {
-    return {
-      jsonPathPrompt: this.jsonPathPrompt,
-      responseSynthesisPrompt: this.responseSynthesisPrompt,
-    };
-  }
-
-  updatePrompts(prompts: {
-    jsonPathPrompt?: JSONPathPrompt;
-    responseSynthesisPrompt?: ResponseSynthesisPrompt;
-  }): void {
-    if (prompts.jsonPathPrompt) {
-      this.jsonPathPrompt = prompts.jsonPathPrompt;
-    }
-    if (prompts.responseSynthesisPrompt) {
-      this.responseSynthesisPrompt = prompts.responseSynthesisPrompt;
-    }
-  }
-
-  getPromptModules(): Record<string, unknown> {
-    return {};
-  }
-
-  getSchemaContext(): string {
-    return JSON.stringify(this.jsonSchema);
-  }
-
-  query(
-    params: QueryEngineParamsStreaming,
-  ): Promise<AsyncIterable<EngineResponse>>;
-  query(params: QueryEngineParamsNonStreaming): Promise<EngineResponse>;
-  async query(
-    params: QueryEngineParamsStreaming | QueryEngineParamsNonStreaming,
-  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
-    const { query, stream } = params;
-
-    if (stream) {
-      throw new Error("Streaming is not supported");
-    }
-
-    const schema = this.getSchemaContext();
-
-    const { text: jsonPathResponse } = await this.serviceContext.llm.complete({
-      prompt: this.jsonPathPrompt({ query, schema }),
-    });
-
-    if (this.verbose) {
-      console.log(
-        `> JSONPath Instructions:\n\`\`\`\n${jsonPathResponse}\n\`\`\`\n`,
-      );
-    }
-
-    const jsonPathOutput = await this.outputProcessor({
-      llmOutput: jsonPathResponse,
-      jsonValue: this.jsonValue,
-    });
-
-    if (this.verbose) {
-      console.log(`> JSONPath Output: ${jsonPathOutput}\n`);
-    }
-
-    let responseStr;
-
-    if (this.synthesizeResponse) {
-      responseStr = await this.serviceContext.llm.complete({
-        prompt: this.responseSynthesisPrompt({
-          query,
-          jsonSchema: schema,
-          jsonPath: jsonPathResponse,
-          jsonPathValue: JSON.stringify(jsonPathOutput),
-        }),
-      });
-
-      responseStr = responseStr.text;
-    } else {
-      responseStr = JSON.stringify(jsonPathOutput);
-    }
-
-    const responseMetadata = {
-      jsonPathResponse,
-    };
-
-    const response = EngineResponse.fromResponse(responseStr, false);
-
-    response.metadata = responseMetadata;
-
-    return response;
-  }
-}
diff --git a/packages/experimental/src/engines/query/index.ts b/packages/experimental/src/engines/query/index.ts
deleted file mode 100644
index 9187f0735acd7c75e64a278b5b671badad017a29..0000000000000000000000000000000000000000
--- a/packages/experimental/src/engines/query/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-export * from "./JSONQueryEngine.js";
diff --git a/packages/experimental/src/engines/query/prompt.ts b/packages/experimental/src/engines/query/prompt.ts
deleted file mode 100644
index 0b38c3f9f0437223b8ab15e70190ee3a74798e3b..0000000000000000000000000000000000000000
--- a/packages/experimental/src/engines/query/prompt.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-export const defaultJsonPathPrompt = ({
-  query,
-  schema,
-}: {
-  query: string;
-  schema: string;
-}) => `
-We have provided a JSON schema below:
-${schema}
-Given a task, respond with a JSON Path query that can retrieve data from a JSON value that matches the schema.
-Task: ${query}
-JSONPath: 
-`;
-
-export type JSONPathPrompt = typeof defaultJsonPathPrompt;
-
-export const defaultResponseSynthesizePrompt = ({
-  query,
-  jsonSchema,
-  jsonPath,
-  jsonPathValue,
-}: {
-  query: string;
-  jsonSchema: string;
-  jsonPath: string;
-  jsonPathValue: string;
-}) => `
-Given a query, synthesize a response to satisfy the query using the JSON results. Only include details that are relevant to the query. If you don't know the answer, then say that.
-JSON Schema: ${jsonSchema}
-JSON Path: ${jsonPath}
-Value at path: ${jsonPathValue}
-Query: ${query}
-Response: 
-`;
-
-export type ResponseSynthesisPrompt = typeof defaultResponseSynthesizePrompt;
diff --git a/packages/experimental/src/index.ts b/packages/experimental/src/index.ts
index 10e6ce4d1f99240ec3754fff71ac7e51e52409ab..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644
--- a/packages/experimental/src/index.ts
+++ b/packages/experimental/src/index.ts
@@ -1 +0,0 @@
-export * from "./engines/query/index.js";
diff --git a/packages/llamaindex/e2e/examples/cloudflare-worker-agent/tsconfig.json b/packages/llamaindex/e2e/examples/cloudflare-worker-agent/tsconfig.json
index 60dd42cd9e17e28febebb794a642f74eb8b916a8..bf4484d4ea3463ebf0aa032d0ac529b767264e92 100644
--- a/packages/llamaindex/e2e/examples/cloudflare-worker-agent/tsconfig.json
+++ b/packages/llamaindex/e2e/examples/cloudflare-worker-agent/tsconfig.json
@@ -100,7 +100,8 @@
 
     /* Completeness */
     // "skipDefaultLibCheck": true,                      /* Skip type checking .d.ts files that are included with TypeScript. */
-    "skipLibCheck": true /* Skip type checking all .d.ts files. */
+    "skipLibCheck": true /* Skip type checking all .d.ts files. */,
+    "tsBuildInfoFile": "./dist/.tsbuildinfo"
   },
   "exclude": ["test"]
 }
diff --git a/packages/llamaindex/e2e/examples/waku-query-engine/src/actions.ts b/packages/llamaindex/e2e/examples/waku-query-engine/src/actions.ts
index 5f44b7066f8d1c9fe9a5dc416bca6ba4ad84d2be..8978b12c881ac0708d7fe519f762792191883b94 100644
--- a/packages/llamaindex/e2e/examples/waku-query-engine/src/actions.ts
+++ b/packages/llamaindex/e2e/examples/waku-query-engine/src/actions.ts
@@ -1,7 +1,7 @@
 "use server";
-import { Document, VectorStoreIndex, type QueryEngine } from "llamaindex";
+import { BaseQueryEngine, Document, VectorStoreIndex } from "llamaindex";
 import { readFile } from "node:fs/promises";
-let _queryEngine: QueryEngine;
+let _queryEngine: BaseQueryEngine;
 
 async function lazyLoadQueryEngine() {
   if (!_queryEngine) {
diff --git a/packages/llamaindex/src/ServiceContext.ts b/packages/llamaindex/src/ServiceContext.ts
index a185423fbdcb68759c9049dbea9514322f82fd89..9500ce44371d58e8780f9e44af4584f13b5e0eb3 100644
--- a/packages/llamaindex/src/ServiceContext.ts
+++ b/packages/llamaindex/src/ServiceContext.ts
@@ -9,6 +9,8 @@ import { OpenAI, OpenAIEmbedding } from "@llamaindex/openai";
 
 /**
  * The ServiceContext is a collection of components that are used in different parts of the application.
+ *
+ * @deprecated This will no longer be supported; please use `Settings` instead.
  */
 export interface ServiceContext {
   llm: LLM;
diff --git a/packages/llamaindex/src/cloud/LlamaCloudIndex.ts b/packages/llamaindex/src/cloud/LlamaCloudIndex.ts
index 14ddb6f8de3884807b08d50f7d48fb4c1f2b8f95..58018917440d44b0df16c51dae7042386fdc1eb7 100644
--- a/packages/llamaindex/src/cloud/LlamaCloudIndex.ts
+++ b/packages/llamaindex/src/cloud/LlamaCloudIndex.ts
@@ -1,9 +1,9 @@
+import type { BaseQueryEngine } from "@llamaindex/core/query-engine";
+import type { BaseSynthesizer } from "@llamaindex/core/response-synthesizers";
 import type { Document, TransformComponent } from "@llamaindex/core/schema";
 import type { BaseRetriever } from "../Retriever.js";
 import { RetrieverQueryEngine } from "../engines/query/RetrieverQueryEngine.js";
 import type { BaseNodePostprocessor } from "../postprocessors/types.js";
-import type { BaseSynthesizer } from "../synthesizers/types.js";
-import type { QueryEngine } from "../types.js";
 import type { CloudRetrieveParams } from "./LlamaCloudRetriever.js";
 import { LlamaCloudRetriever } from "./LlamaCloudRetriever.js";
 import { getPipelineCreate } from "./config.js";
@@ -300,7 +300,7 @@ export class LlamaCloudIndex {
       preFilters?: unknown;
       nodePostprocessors?: BaseNodePostprocessor[];
     } & CloudRetrieveParams,
-  ): QueryEngine {
+  ): BaseQueryEngine {
     const retriever = new LlamaCloudRetriever({
       ...this.params,
       ...params,
diff --git a/packages/llamaindex/src/engines/chat/CondenseQuestionChatEngine.ts b/packages/llamaindex/src/engines/chat/CondenseQuestionChatEngine.ts
index a20cf92f49932be882f8c24a7ed75a1833543519..1ff9fdfe4fb1bf73dc3edf78b6611233fbf8767e 100644
--- a/packages/llamaindex/src/engines/chat/CondenseQuestionChatEngine.ts
+++ b/packages/llamaindex/src/engines/chat/CondenseQuestionChatEngine.ts
@@ -6,6 +6,7 @@ import {
   type ModuleRecord,
   PromptMixin,
 } from "@llamaindex/core/prompts";
+import type { BaseQueryEngine } from "@llamaindex/core/query-engine";
 import type { EngineResponse } from "@llamaindex/core/schema";
 import {
   extractText,
@@ -15,7 +16,6 @@ import {
 } from "@llamaindex/core/utils";
 import type { ServiceContext } from "../../ServiceContext.js";
 import { llmFromSettingsOrContext } from "../../Settings.js";
-import type { QueryEngine } from "../../types.js";
 import type {
   ChatEngine,
   ChatEngineParamsNonStreaming,
@@ -37,13 +37,13 @@ export class CondenseQuestionChatEngine
   extends PromptMixin
   implements ChatEngine
 {
-  queryEngine: QueryEngine;
+  queryEngine: BaseQueryEngine;
   chatHistory: BaseMemory;
   llm: LLM;
   condenseMessagePrompt: CondenseQuestionPrompt;
 
   constructor(init: {
-    queryEngine: QueryEngine;
+    queryEngine: BaseQueryEngine;
     chatHistory: ChatMessage[];
     serviceContext?: ServiceContext;
     condenseMessagePrompt?: CondenseQuestionPrompt;
@@ -114,10 +114,12 @@ export class CondenseQuestionChatEngine
     chatHistory.put({ content: message, role: "user" });
 
     if (stream) {
-      const stream = await this.queryEngine.query({
-        query: condensedQuestion,
-        stream: true,
-      });
+      const stream = await this.queryEngine.query(
+        {
+          query: condensedQuestion,
+        },
+        true,
+      );
       return streamReducer({
         stream,
         initialValue: "",
diff --git a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts b/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
index 378f2b05ff48d2ad130ec3aa2d495557efe5ef56..976400556d26b5bd5a953dccf5132b9a6be0c6e3 100644
--- a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
+++ b/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
@@ -6,9 +6,9 @@ import {
   PromptMixin,
 } from "@llamaindex/core/prompts";
 import { MetadataMode, type NodeWithScore } from "@llamaindex/core/schema";
+import { createMessageContent } from "@llamaindex/core/utils";
 import type { BaseNodePostprocessor } from "../../postprocessors/index.js";
 import type { BaseRetriever } from "../../Retriever.js";
-import { createMessageContent } from "../../synthesizers/utils.js";
 import type { Context, ContextGenerator } from "./types.js";
 
 export class DefaultContextGenerator
diff --git a/packages/llamaindex/src/engines/query/RetrieverQueryEngine.ts b/packages/llamaindex/src/engines/query/RetrieverQueryEngine.ts
index 665d3150075b8cf846c46a18e33d503be58c855e..aebe09dd6b0fc12ab1b0ea112ac9a90aac855723 100644
--- a/packages/llamaindex/src/engines/query/RetrieverQueryEngine.ts
+++ b/packages/llamaindex/src/engines/query/RetrieverQueryEngine.ts
@@ -1,20 +1,15 @@
-import { PromptMixin } from "@llamaindex/core/prompts";
-import { EngineResponse, type NodeWithScore } from "@llamaindex/core/schema";
-import { wrapEventCaller } from "@llamaindex/core/utils";
+import { BaseQueryEngine } from "@llamaindex/core/query-engine";
+import type { BaseSynthesizer } from "@llamaindex/core/response-synthesizers";
+import { getResponseSynthesizer } from "@llamaindex/core/response-synthesizers";
+import { type NodeWithScore } from "@llamaindex/core/schema";
+import { extractText } from "@llamaindex/core/utils";
 import type { BaseNodePostprocessor } from "../../postprocessors/index.js";
 import type { BaseRetriever } from "../../Retriever.js";
-import type { BaseSynthesizer } from "../../synthesizers/index.js";
-import { ResponseSynthesizer } from "../../synthesizers/index.js";
-import type {
-  QueryEngine,
-  QueryEngineParamsNonStreaming,
-  QueryEngineParamsStreaming,
-} from "../../types.js";
 
 /**
  * A query engine that uses a retriever to query an index and then synthesizes the response.
  */
-export class RetrieverQueryEngine extends PromptMixin implements QueryEngine {
+export class RetrieverQueryEngine extends BaseQueryEngine {
   retriever: BaseRetriever;
   responseSynthesizer: BaseSynthesizer;
   nodePostprocessors: BaseNodePostprocessor[];
@@ -26,14 +21,36 @@ export class RetrieverQueryEngine extends PromptMixin implements QueryEngine {
     preFilters?: unknown,
     nodePostprocessors?: BaseNodePostprocessor[],
   ) {
-    super();
+    super(async (strOrQueryBundle, stream) => {
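+      // retrieve nodes for the query, then synthesize a response from them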
+      const nodesWithScore = await this.retrieve(
+        typeof strOrQueryBundle === "string"
+          ? strOrQueryBundle
+          : extractText(strOrQueryBundle),
+      );
+      if (stream) {
+        return this.responseSynthesizer.synthesize(
+          {
+            query:
+              typeof strOrQueryBundle === "string"
+                ? { query: strOrQueryBundle }
+                : strOrQueryBundle,
+            nodes: nodesWithScore,
+          },
+          true,
+        );
+      }
+      return this.responseSynthesizer.synthesize({
+        query:
+          typeof strOrQueryBundle === "string"
+            ? { query: strOrQueryBundle }
+            : strOrQueryBundle,
+        nodes: nodesWithScore,
+      });
+    });
 
     this.retriever = retriever;
     this.responseSynthesizer =
-      responseSynthesizer ||
-      new ResponseSynthesizer({
-        serviceContext: retriever.serviceContext,
-      });
+      responseSynthesizer || getResponseSynthesizer("compact");
     this.preFilters = preFilters;
     this.nodePostprocessors = nodePostprocessors || [];
   }
@@ -71,29 +88,4 @@ export class RetrieverQueryEngine extends PromptMixin implements QueryEngine {
 
     return await this.applyNodePostprocessors(nodes, query);
   }
-
-  query(
-    params: QueryEngineParamsStreaming,
-  ): Promise<AsyncIterable<EngineResponse>>;
-  query(params: QueryEngineParamsNonStreaming): Promise<EngineResponse>;
-  @wrapEventCaller
-  async query(
-    params: QueryEngineParamsStreaming | QueryEngineParamsNonStreaming,
-  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
-    const { query, stream } = params;
-    const nodesWithScore = await this.retrieve(query);
-    if (stream) {
-      return this.responseSynthesizer.synthesize(
-        {
-          query,
-          nodesWithScore,
-        },
-        true,
-      );
-    }
-    return this.responseSynthesizer.synthesize({
-      query,
-      nodesWithScore,
-    });
-  }
 }
diff --git a/packages/llamaindex/src/engines/query/RouterQueryEngine.ts b/packages/llamaindex/src/engines/query/RouterQueryEngine.ts
index e564799ec09b8fdb4a8b3792c32babca8dd2a6c4..8d4f6058daddfce0352871d726f4e2a16a18699a 100644
--- a/packages/llamaindex/src/engines/query/RouterQueryEngine.ts
+++ b/packages/llamaindex/src/engines/query/RouterQueryEngine.ts
@@ -1,20 +1,20 @@
-import { PromptMixin } from "@llamaindex/core/prompts";
-import type { QueryType } from "@llamaindex/core/query-engine";
+import {
+  BaseQueryEngine,
+  type QueryBundle,
+} from "@llamaindex/core/query-engine";
+import {
+  BaseSynthesizer,
+  getResponseSynthesizer,
+} from "@llamaindex/core/response-synthesizers";
 import { EngineResponse, type NodeWithScore } from "@llamaindex/core/schema";
 import { extractText } from "@llamaindex/core/utils";
 import type { ServiceContext } from "../../ServiceContext.js";
 import { llmFromSettingsOrContext } from "../../Settings.js";
 import type { BaseSelector } from "../../selectors/index.js";
 import { LLMSingleSelector } from "../../selectors/index.js";
-import { TreeSummarize } from "../../synthesizers/index.js";
-import type {
-  QueryEngine,
-  QueryEngineParamsNonStreaming,
-  QueryEngineParamsStreaming,
-} from "../../types.js";
 
 type RouterQueryEngineTool = {
-  queryEngine: QueryEngine;
+  queryEngine: BaseQueryEngine;
   description: string;
 };
 
@@ -23,59 +23,67 @@ type RouterQueryEngineMetadata = {
 };
 
 async function combineResponses(
-  summarizer: TreeSummarize,
+  summarizer: BaseSynthesizer,
   responses: EngineResponse[],
-  queryType: QueryType,
+  queryBundle: QueryBundle,
   verbose: boolean = false,
 ): Promise<EngineResponse> {
   if (verbose) {
     console.log("Combining responses from multiple query engines.");
   }
 
-  const responseStrs: string[] = [];
   const sourceNodes: NodeWithScore[] = [];
 
   for (const response of responses) {
     if (response?.sourceNodes) {
       sourceNodes.push(...response.sourceNodes);
     }
-
-    responseStrs.push(extractText(response.message.content));
   }
 
-  const summary = await summarizer.getResponse({
-    query: extractText(queryType),
-    textChunks: responseStrs,
+  return await summarizer.synthesize({
+    query: queryBundle,
+    nodes: sourceNodes,
   });
-
-  return EngineResponse.fromResponse(summary, false, sourceNodes);
 }
 
 /**
  * A query engine that uses multiple query engines and selects the best one.
  */
-export class RouterQueryEngine extends PromptMixin implements QueryEngine {
+export class RouterQueryEngine extends BaseQueryEngine {
   private selector: BaseSelector;
-  private queryEngines: QueryEngine[];
+  private queryEngines: BaseQueryEngine[];
   private metadatas: RouterQueryEngineMetadata[];
-  private summarizer: TreeSummarize;
+  private summarizer: BaseSynthesizer;
   private verbose: boolean;
 
   constructor(init: {
     selector: BaseSelector;
     queryEngineTools: RouterQueryEngineTool[];
     serviceContext?: ServiceContext | undefined;
-    summarizer?: TreeSummarize | undefined;
+    summarizer?: BaseSynthesizer | undefined;
     verbose?: boolean | undefined;
   }) {
-    super();
+    super(async (strOrQueryBundle, stream) => {
+      const response = await this.queryRoute(
+        typeof strOrQueryBundle === "string"
+          ? { query: strOrQueryBundle }
+          : strOrQueryBundle,
+      );
+
+      if (stream) {
+        throw new Error("Streaming is not supported yet.");
+      }
+
+      return response;
+    });
 
     this.selector = init.selector;
     this.queryEngines = init.queryEngineTools.map((tool) => tool.queryEngine);
     this.metadatas = init.queryEngineTools.map((tool) => ({
       description: tool.description,
     }));
-    this.summarizer = init.summarizer || new TreeSummarize(init.serviceContext);
+    this.summarizer =
+      init.summarizer || getResponseSynthesizer("tree_summarize");
     this.verbose = init.verbose ?? false;
   }
 
@@ -96,7 +104,7 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine {
     queryEngineTools: RouterQueryEngineTool[];
     selector?: BaseSelector;
     serviceContext?: ServiceContext;
-    summarizer?: TreeSummarize;
+    summarizer?: BaseSynthesizer;
     verbose?: boolean;
   }) {
     const serviceContext = init.serviceContext;
@@ -114,25 +122,7 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine {
     });
   }
 
-  query(
-    params: QueryEngineParamsStreaming,
-  ): Promise<AsyncIterable<EngineResponse>>;
-  query(params: QueryEngineParamsNonStreaming): Promise<EngineResponse>;
-  async query(
-    params: QueryEngineParamsStreaming | QueryEngineParamsNonStreaming,
-  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
-    const { query, stream } = params;
-
-    const response = await this.queryRoute(query);
-
-    if (stream) {
-      throw new Error("Streaming is not supported yet.");
-    }
-
-    return response;
-  }
-
-  private async queryRoute(query: QueryType): Promise<EngineResponse> {
+  private async queryRoute(query: QueryBundle): Promise<EngineResponse> {
     const result = await this.selector.select(this.metadatas, query);
 
     if (result.selections.length > 1) {
@@ -146,11 +136,7 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine {
         }
 
         const selectedQueryEngine = this.queryEngines[engineInd.index]!;
-        responses.push(
-          await selectedQueryEngine.query({
-            query: extractText(query),
-          }),
-        );
+        responses.push(await selectedQueryEngine.query(query));
       }
 
       if (responses.length > 1) {
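
`combineResponses` now hands the collected source nodes straight to a `BaseSynthesizer` instead of re-summarizing extracted response strings, and the default summarizer is `getResponseSynthesizer("tree_summarize")`. A hedged construction sketch; the two sub-engines are stand-in assumptions, and passing `true` for streaming still throws:

```ts
import { RouterQueryEngine, type BaseQueryEngine } from "llamaindex";

// stand-ins for real engines built elsewhere
declare const vectorEngine: BaseQueryEngine;
declare const summaryEngine: BaseQueryEngine;

const router = RouterQueryEngine.fromDefaults({
  queryEngineTools: [
    { queryEngine: vectorEngine, description: "Useful for specific facts." },
    { queryEngine: summaryEngine, description: "Useful for broad summaries." },
  ],
  // summarizer defaults to getResponseSynthesizer("tree_summarize")
});

// note: router.query(..., true) still throws "Streaming is not supported yet."
const answer = await router.query({ query: "What does the corpus cover?" });
```
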
diff --git a/packages/llamaindex/src/engines/query/SubQuestionQueryEngine.ts b/packages/llamaindex/src/engines/query/SubQuestionQueryEngine.ts
index a03de63c0ed061f100703c0eef128404d317b667..cad4e895f8f2790788247c4d9e091dc6dcbe1614 100644
--- a/packages/llamaindex/src/engines/query/SubQuestionQueryEngine.ts
+++ b/packages/llamaindex/src/engines/query/SubQuestionQueryEngine.ts
@@ -1,29 +1,21 @@
-import {
-  EngineResponse,
-  TextNode,
-  type NodeWithScore,
-} from "@llamaindex/core/schema";
+import type { BaseSynthesizer } from "@llamaindex/core/response-synthesizers";
+import { getResponseSynthesizer } from "@llamaindex/core/response-synthesizers";
+import { TextNode, type NodeWithScore } from "@llamaindex/core/schema";
 import { LLMQuestionGenerator } from "../../QuestionGenerator.js";
 import type { ServiceContext } from "../../ServiceContext.js";
-import type { BaseSynthesizer } from "../../synthesizers/index.js";
-import {
-  CompactAndRefine,
-  ResponseSynthesizer,
-} from "../../synthesizers/index.js";
 
 import type { BaseTool, ToolMetadata } from "@llamaindex/core/llms";
-import { PromptMixin, type PromptsRecord } from "@llamaindex/core/prompts";
-import type { BaseQueryEngine, QueryType } from "@llamaindex/core/query-engine";
-import { wrapEventCaller } from "@llamaindex/core/utils";
+import type { PromptsRecord } from "@llamaindex/core/prompts";
+import {
+  BaseQueryEngine,
+  type QueryBundle,
+} from "@llamaindex/core/query-engine";
 import type { BaseQuestionGenerator, SubQuestion } from "./types.js";
 
 /**
  * SubQuestionQueryEngine decomposes a question into subquestions, answers each one, and then synthesizes a final response from the combined answers.
  */
-export class SubQuestionQueryEngine
-  extends PromptMixin
-  implements BaseQueryEngine
-{
+export class SubQuestionQueryEngine extends BaseQueryEngine {
   responseSynthesizer: BaseSynthesizer;
   questionGen: BaseQuestionGenerator;
   queryEngines: BaseTool[];
@@ -34,11 +26,48 @@ export class SubQuestionQueryEngine
     responseSynthesizer: BaseSynthesizer;
     queryEngineTools: BaseTool[];
   }) {
-    super();
+    super(async (strOrQueryBundle, stream) => {
+      let query: QueryBundle;
+      if (typeof strOrQueryBundle === "string") {
+        query = {
+          query: strOrQueryBundle,
+        };
+      } else {
+        query = strOrQueryBundle;
+      }
+      const subQuestions = await this.questionGen.generate(
+        this.metadatas,
+        strOrQueryBundle,
+      );
+
+      const subQNodes = await Promise.all(
+        subQuestions.map((subQ) => this.querySubQ(subQ)),
+      );
+
+      const nodesWithScore: NodeWithScore[] = subQNodes.filter(
+        (node) => node !== null,
+      );
+      if (stream) {
+        return this.responseSynthesizer.synthesize(
+          {
+            query,
+            nodes: nodesWithScore,
+          },
+          true,
+        );
+      }
+      return this.responseSynthesizer.synthesize(
+        {
+          query,
+          nodes: nodesWithScore,
+        },
+        false,
+      );
+    });
 
     this.questionGen = init.questionGen;
     this.responseSynthesizer =
-      init.responseSynthesizer ?? new ResponseSynthesizer();
+      init.responseSynthesizer ?? getResponseSynthesizer("compact");
     this.queryEngines = init.queryEngineTools;
     this.metadatas = init.queryEngineTools.map((tool) => tool.metadata);
   }
@@ -62,15 +91,9 @@ export class SubQuestionQueryEngine
     responseSynthesizer?: BaseSynthesizer;
     serviceContext?: ServiceContext;
   }) {
-    const serviceContext = init.serviceContext;
-
     const questionGen = init.questionGen ?? new LLMQuestionGenerator();
     const responseSynthesizer =
-      init.responseSynthesizer ??
-      new ResponseSynthesizer({
-        responseBuilder: new CompactAndRefine(serviceContext),
-        serviceContext,
-      });
+      init.responseSynthesizer ?? getResponseSynthesizer("compact");
 
     return new SubQuestionQueryEngine({
       questionGen,
@@ -79,40 +102,6 @@ export class SubQuestionQueryEngine
     });
   }
 
-  query(query: QueryType, stream: true): Promise<AsyncIterable<EngineResponse>>;
-  query(query: QueryType, stream?: false): Promise<EngineResponse>;
-  @wrapEventCaller
-  async query(
-    query: QueryType,
-    stream?: boolean,
-  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
-    const subQuestions = await this.questionGen.generate(this.metadatas, query);
-
-    const subQNodes = await Promise.all(
-      subQuestions.map((subQ) => this.querySubQ(subQ)),
-    );
-
-    const nodesWithScore = subQNodes
-      .filter((node) => node !== null)
-      .map((node) => node as NodeWithScore);
-    if (stream) {
-      return this.responseSynthesizer.synthesize(
-        {
-          query,
-          nodesWithScore,
-        },
-        true,
-      );
-    }
-    return this.responseSynthesizer.synthesize(
-      {
-        query,
-        nodesWithScore,
-      },
-      false,
-    );
-  }
-
   private async querySubQ(subQ: SubQuestion): Promise<NodeWithScore | null> {
     try {
       const question = subQ.subQuestion;
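
`SubQuestionQueryEngine` gets the same treatment: the query body moves into the `super` callback and `fromDefaults` now falls back to `getResponseSynthesizer("compact")`. A usage sketch, assuming `QueryEngineTool` wraps the sub-engine as before:

```ts
import {
  QueryEngineTool,
  SubQuestionQueryEngine,
  type BaseQueryEngine,
} from "llamaindex";

declare const docEngine: BaseQueryEngine; // stand-in for a real engine

const engine = SubQuestionQueryEngine.fromDefaults({
  queryEngineTools: [
    new QueryEngineTool({
      queryEngine: docEngine,
      metadata: { name: "docs", description: "Answers questions about the docs." },
    }),
  ],
});

// streaming uses the same trailing-boolean convention
const stream = await engine.query({ query: "Compare chapter 1 and chapter 2." }, true);
```
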
diff --git a/packages/llamaindex/src/index.edge.ts b/packages/llamaindex/src/index.edge.ts
index 541d9f0248f85c4a547e5bf6730ee03539a7932b..f37f249a2e0729e304360adf49c7fd4a8dc91381 100644
--- a/packages/llamaindex/src/index.edge.ts
+++ b/packages/llamaindex/src/index.edge.ts
@@ -33,6 +33,8 @@ export type {
 export * from "@llamaindex/core/indices";
 export * from "@llamaindex/core/llms";
 export * from "@llamaindex/core/prompts";
+export * from "@llamaindex/core/query-engine";
+export * from "@llamaindex/core/response-synthesizers";
 export * from "@llamaindex/core/schema";
 
 declare module "@llamaindex/core/global" {
@@ -69,6 +71,5 @@ export * from "./selectors/index.js";
 export * from "./ServiceContext.js";
 export { Settings } from "./Settings.js";
 export * from "./storage/StorageContext.js";
-export * from "./synthesizers/index.js";
 export * from "./tools/index.js";
 export * from "./types.js";
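
Because the package root now re-exports `@llamaindex/core/query-engine` and `@llamaindex/core/response-synthesizers` while `./synthesizers/index.js` goes away, consumers keep importing from the main entry point:

```ts
// previously backed by ./synthesizers; now re-exported from @llamaindex/core
import {
  BaseQueryEngine,
  getResponseSynthesizer,
  type BaseSynthesizer,
} from "llamaindex";
```
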
diff --git a/packages/llamaindex/src/indices/BaseIndex.ts b/packages/llamaindex/src/indices/BaseIndex.ts
index 7515b7a6383047acbc3cc2e592ca8b75a11f8981..3d5d55c2cf7c650121114ea55eff3cf4780744f2 100644
--- a/packages/llamaindex/src/indices/BaseIndex.ts
+++ b/packages/llamaindex/src/indices/BaseIndex.ts
@@ -1,3 +1,5 @@
+import type { BaseQueryEngine } from "@llamaindex/core/query-engine";
+import type { BaseSynthesizer } from "@llamaindex/core/response-synthesizers";
 import type { BaseNode, Document } from "@llamaindex/core/schema";
 import type { BaseRetriever } from "../Retriever.js";
 import type { ServiceContext } from "../ServiceContext.js";
@@ -6,8 +8,6 @@ import { runTransformations } from "../ingestion/IngestionPipeline.js";
 import type { StorageContext } from "../storage/StorageContext.js";
 import type { BaseDocumentStore } from "../storage/docStore/types.js";
 import type { BaseIndexStore } from "../storage/indexStore/types.js";
-import type { BaseSynthesizer } from "../synthesizers/types.js";
-import type { QueryEngine } from "../types.js";
 import { IndexStruct } from "./IndexStruct.js";
 import { IndexStructType } from "./json-to-index-struct.js";
 
@@ -83,7 +83,7 @@ export abstract class BaseIndex<T> {
   abstract asQueryEngine(options?: {
     retriever?: BaseRetriever;
     responseSynthesizer?: BaseSynthesizer;
-  }): QueryEngine;
+  }): BaseQueryEngine;
 
   /**
    * Insert a document into the index.
diff --git a/packages/llamaindex/src/indices/keyword/index.ts b/packages/llamaindex/src/indices/keyword/index.ts
index 1e527fec08091104e4f2ea29a361cb505f5314ca..6b326317acf223bb97a6a994df6fe49663573175 100644
--- a/packages/llamaindex/src/indices/keyword/index.ts
+++ b/packages/llamaindex/src/indices/keyword/index.ts
@@ -1,3 +1,4 @@
+import type { BaseSynthesizer } from "@llamaindex/core/response-synthesizers";
 import type {
   BaseNode,
   Document,
@@ -12,8 +13,6 @@ import type { BaseNodePostprocessor } from "../../postprocessors/index.js";
 import type { StorageContext } from "../../storage/StorageContext.js";
 import { storageContextFromDefaults } from "../../storage/StorageContext.js";
 import type { BaseDocumentStore } from "../../storage/docStore/types.js";
-import type { BaseSynthesizer } from "../../synthesizers/index.js";
-import type { QueryEngine } from "../../types.js";
 import type { BaseIndexInit } from "../BaseIndex.js";
 import { BaseIndex, KeywordTable } from "../BaseIndex.js";
 import { IndexStructType } from "../json-to-index-struct.js";
@@ -30,6 +29,7 @@ import {
   type KeywordExtractPrompt,
   type QueryKeywordExtractPrompt,
 } from "@llamaindex/core/prompts";
+import type { BaseQueryEngine } from "@llamaindex/core/query-engine";
 import { extractText } from "@llamaindex/core/utils";
 import { llmFromSettingsOrContext } from "../../Settings.js";
 
@@ -237,7 +237,7 @@ export class KeywordTableIndex extends BaseIndex<KeywordTable> {
     responseSynthesizer?: BaseSynthesizer;
     preFilters?: unknown;
     nodePostprocessors?: BaseNodePostprocessor[];
-  }): QueryEngine {
+  }): BaseQueryEngine {
     const { retriever, responseSynthesizer } = options ?? {};
     return new RetrieverQueryEngine(
       retriever ?? this.asRetriever(),
diff --git a/packages/llamaindex/src/indices/summary/index.ts b/packages/llamaindex/src/indices/summary/index.ts
index 43ef6181f433e657325dbf4ecc93fbfe35d485ca..375af4a6a2ca875fa632d96c78a9ec1b9497ed85 100644
--- a/packages/llamaindex/src/indices/summary/index.ts
+++ b/packages/llamaindex/src/indices/summary/index.ts
@@ -2,6 +2,8 @@ import {
   type ChoiceSelectPrompt,
   defaultChoiceSelectPrompt,
 } from "@llamaindex/core/prompts";
+import type { BaseSynthesizer } from "@llamaindex/core/response-synthesizers";
+import { getResponseSynthesizer } from "@llamaindex/core/response-synthesizers";
 import type {
   BaseNode,
   Document,
@@ -23,12 +25,6 @@ import type {
   BaseDocumentStore,
   RefDocInfo,
 } from "../../storage/docStore/types.js";
-import type { BaseSynthesizer } from "../../synthesizers/index.js";
-import {
-  CompactAndRefine,
-  ResponseSynthesizer,
-} from "../../synthesizers/index.js";
-import type { QueryEngine } from "../../types.js";
 import type { BaseIndexInit } from "../BaseIndex.js";
 import { BaseIndex } from "../BaseIndex.js";
 import { IndexList, IndexStructType } from "../json-to-index-struct.js";
@@ -178,7 +174,7 @@ export class SummaryIndex extends BaseIndex<IndexList> {
     responseSynthesizer?: BaseSynthesizer;
     preFilters?: unknown;
     nodePostprocessors?: BaseNodePostprocessor[];
-  }): QueryEngine & RetrieverQueryEngine {
+  }): RetrieverQueryEngine {
     let { retriever, responseSynthesizer } = options ?? {};
 
     if (!retriever) {
@@ -186,11 +182,7 @@ export class SummaryIndex extends BaseIndex<IndexList> {
     }
 
     if (!responseSynthesizer) {
-      const responseBuilder = new CompactAndRefine(this.serviceContext);
-      responseSynthesizer = new ResponseSynthesizer({
-        serviceContext: this.serviceContext,
-        responseBuilder,
-      });
+      responseSynthesizer = getResponseSynthesizer("compact");
     }
 
     return new RetrieverQueryEngine(
diff --git a/packages/llamaindex/src/indices/vectorStore/index.ts b/packages/llamaindex/src/indices/vectorStore/index.ts
index 71ec89c1bca028f1ea313b57e4b1a8c6a83f040a..c8a5bbd9ac1068b7d52b88ca4639e955b73a6527 100644
--- a/packages/llamaindex/src/indices/vectorStore/index.ts
+++ b/packages/llamaindex/src/indices/vectorStore/index.ts
@@ -4,6 +4,7 @@ import {
 } from "@llamaindex/core/embeddings";
 import { Settings } from "@llamaindex/core/global";
 import type { MessageContent } from "@llamaindex/core/llms";
+import type { BaseSynthesizer } from "@llamaindex/core/response-synthesizers";
 import {
   ImageNode,
   ModalityType,
@@ -30,8 +31,6 @@ import type { BaseNodePostprocessor } from "../../postprocessors/types.js";
 import type { StorageContext } from "../../storage/StorageContext.js";
 import { storageContextFromDefaults } from "../../storage/StorageContext.js";
 import type { BaseIndexStore } from "../../storage/indexStore/types.js";
-import type { BaseSynthesizer } from "../../synthesizers/types.js";
-import type { QueryEngine } from "../../types.js";
 import type {
   MetadataFilters,
   VectorStore,
@@ -288,7 +287,7 @@ export class VectorStoreIndex extends BaseIndex<IndexDict> {
     preFilters?: MetadataFilters;
     nodePostprocessors?: BaseNodePostprocessor[];
     similarityTopK?: number;
-  }): QueryEngine & RetrieverQueryEngine {
+  }): RetrieverQueryEngine {
     const {
       retriever,
       responseSynthesizer,
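
Dropping the `QueryEngine &` intersection means `asQueryEngine` returns a plain `RetrieverQueryEngine`, so its members resolve without casts. Illustrative, assuming an existing index:

```ts
import { getResponseSynthesizer, type VectorStoreIndex } from "llamaindex";

declare const index: VectorStoreIndex; // stand-in for an existing index

const engine = index.asQueryEngine({
  responseSynthesizer: getResponseSynthesizer("tree_summarize"),
});

// `engine` is typed as RetrieverQueryEngine, so these members are visible
console.log(engine.retriever, engine.responseSynthesizer);
```
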
diff --git a/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts b/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts
deleted file mode 100644
index 0d38964364f39860269241d2795136fa3b0c7e60..0000000000000000000000000000000000000000
--- a/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts
+++ /dev/null
@@ -1,88 +0,0 @@
-import {
-  defaultTextQAPrompt,
-  PromptMixin,
-  type ModuleRecord,
-  type TextQAPrompt,
-} from "@llamaindex/core/prompts";
-import { EngineResponse, MetadataMode } from "@llamaindex/core/schema";
-import { streamConverter } from "@llamaindex/core/utils";
-import type { ServiceContext } from "../ServiceContext.js";
-import { llmFromSettingsOrContext } from "../Settings.js";
-import type { BaseSynthesizer, SynthesizeQuery } from "./types.js";
-import { createMessageContent } from "./utils.js";
-
-export class MultiModalResponseSynthesizer
-  extends PromptMixin
-  implements BaseSynthesizer
-{
-  serviceContext?: ServiceContext | undefined;
-  metadataMode: MetadataMode;
-  textQATemplate: TextQAPrompt;
-
-  constructor({
-    serviceContext,
-    textQATemplate,
-    metadataMode,
-  }: Partial<MultiModalResponseSynthesizer> = {}) {
-    super();
-
-    this.serviceContext = serviceContext;
-    this.metadataMode = metadataMode ?? MetadataMode.NONE;
-    this.textQATemplate = textQATemplate ?? defaultTextQAPrompt;
-  }
-
-  protected _getPromptModules(): ModuleRecord {
-    return {};
-  }
-
-  protected _getPrompts(): { textQATemplate: TextQAPrompt } {
-    return {
-      textQATemplate: this.textQATemplate,
-    };
-  }
-
-  protected _updatePrompts(promptsDict: {
-    textQATemplate: TextQAPrompt;
-  }): void {
-    if (promptsDict.textQATemplate) {
-      this.textQATemplate = promptsDict.textQATemplate;
-    }
-  }
-
-  synthesize(
-    query: SynthesizeQuery,
-    stream: true,
-  ): Promise<AsyncIterable<EngineResponse>>;
-  synthesize(query: SynthesizeQuery, stream?: false): Promise<EngineResponse>;
-  async synthesize(
-    query: SynthesizeQuery,
-    stream?: boolean,
-  ): Promise<AsyncIterable<EngineResponse> | EngineResponse> {
-    const { nodesWithScore } = query;
-    const nodes = nodesWithScore.map(({ node }) => node);
-    const prompt = await createMessageContent(
-      this.textQATemplate,
-      nodes,
-      // fixme: wtf type is this?
-      // { query },
-      {},
-      this.metadataMode,
-    );
-
-    const llm = llmFromSettingsOrContext(this.serviceContext);
-
-    if (stream) {
-      const response = await llm.complete({
-        prompt,
-        stream,
-      });
-      return streamConverter(response, ({ text }) =>
-        EngineResponse.fromResponse(text, true, nodesWithScore),
-      );
-    }
-    const response = await llm.complete({
-      prompt,
-    });
-    return EngineResponse.fromResponse(response.text, false, nodesWithScore);
-  }
-}
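
The standalone `MultiModalResponseSynthesizer` class is deleted; the replacement is the core factory with a `multi_modal` mode (the mode name this PR uses in its examples):

```ts
import { getResponseSynthesizer } from "llamaindex";

// before: new MultiModalResponseSynthesizer()
const synthesizer = getResponseSynthesizer("multi_modal");
```
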
diff --git a/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts b/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts
deleted file mode 100644
index d7d68be607026d6a9542bb6ac05b8503a357860b..0000000000000000000000000000000000000000
--- a/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts
+++ /dev/null
@@ -1,87 +0,0 @@
-import { PromptMixin, type PromptsRecord } from "@llamaindex/core/prompts";
-import { EngineResponse, MetadataMode } from "@llamaindex/core/schema";
-import { streamConverter } from "@llamaindex/core/utils";
-import type { ServiceContext } from "../ServiceContext.js";
-import { getResponseBuilder } from "./builders.js";
-import type {
-  BaseSynthesizer,
-  ResponseBuilder,
-  SynthesizeQuery,
-} from "./types.js";
-
-/**
- * A ResponseSynthesizer is used to generate a response from a query and a list of nodes.
- */
-export class ResponseSynthesizer
-  extends PromptMixin
-  implements BaseSynthesizer
-{
-  responseBuilder: ResponseBuilder;
-  metadataMode: MetadataMode;
-
-  constructor({
-    responseBuilder,
-    serviceContext,
-    metadataMode = MetadataMode.NONE,
-  }: {
-    responseBuilder?: ResponseBuilder | undefined;
-    serviceContext?: ServiceContext | undefined;
-    metadataMode?: MetadataMode | undefined;
-  } = {}) {
-    super();
-
-    this.responseBuilder =
-      responseBuilder ?? getResponseBuilder(serviceContext);
-    this.metadataMode = metadataMode;
-  }
-
-  _getPromptModules() {
-    return {};
-  }
-
-  protected _getPrompts() {
-    const prompts = this.responseBuilder.getPrompts?.();
-    return {
-      ...prompts,
-    };
-  }
-
-  protected _updatePrompts(promptsRecord: PromptsRecord): void {
-    this.responseBuilder.updatePrompts?.(promptsRecord);
-  }
-
-  synthesize(
-    query: SynthesizeQuery,
-    stream: true,
-  ): Promise<AsyncIterable<EngineResponse>>;
-  synthesize(query: SynthesizeQuery, stream?: false): Promise<EngineResponse>;
-  async synthesize(
-    query: SynthesizeQuery,
-    stream?: boolean,
-  ): Promise<AsyncIterable<EngineResponse> | EngineResponse> {
-    const { nodesWithScore } = query;
-    const textChunks: string[] = nodesWithScore.map(({ node }) =>
-      node.getContent(this.metadataMode),
-    );
-    if (stream) {
-      const response = await this.responseBuilder.getResponse(
-        {
-          ...query,
-          textChunks,
-        },
-        true,
-      );
-      return streamConverter(response, (chunk) =>
-        EngineResponse.fromResponse(chunk, true, nodesWithScore),
-      );
-    }
-    const response = await this.responseBuilder.getResponse(
-      {
-        ...query,
-        textChunks,
-      },
-      false,
-    );
-    return EngineResponse.fromResponse(response, false, nodesWithScore);
-  }
-}
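
`ResponseSynthesizer` plus an explicit `ResponseBuilder` collapses into one factory call keyed by mode, and the synthesize payload renames `nodesWithScore` to `nodes`. A migration sketch, with the retrieved nodes left as an assumption:

```ts
import { getResponseSynthesizer, type NodeWithScore } from "llamaindex";

declare const nodesWithScore: NodeWithScore[]; // stand-in for retrieved nodes

// before: new ResponseSynthesizer({ responseBuilder: new CompactAndRefine(serviceContext) })
const synthesizer = getResponseSynthesizer("compact");

const response = await synthesizer.synthesize({
  query: { query: "Summarize these nodes." },
  nodes: nodesWithScore, // field renamed from `nodesWithScore`
});
```
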
diff --git a/packages/llamaindex/src/synthesizers/index.ts b/packages/llamaindex/src/synthesizers/index.ts
deleted file mode 100644
index 52ef00d56e9b25f6f68bc3f7ec44f5072aeb5438..0000000000000000000000000000000000000000
--- a/packages/llamaindex/src/synthesizers/index.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-export * from "./builders.js";
-export * from "./MultiModalResponseSynthesizer.js";
-export * from "./ResponseSynthesizer.js";
-export * from "./types.js";
diff --git a/packages/llamaindex/src/synthesizers/types.ts b/packages/llamaindex/src/synthesizers/types.ts
deleted file mode 100644
index 8c28ae332530e62e27cfb8993f4c82c27018bd69..0000000000000000000000000000000000000000
--- a/packages/llamaindex/src/synthesizers/types.ts
+++ /dev/null
@@ -1,40 +0,0 @@
-import type { PromptMixin } from "@llamaindex/core/prompts";
-import type { QueryType } from "@llamaindex/core/query-engine";
-import { EngineResponse, type NodeWithScore } from "@llamaindex/core/schema";
-
-export interface SynthesizeQuery {
-  query: QueryType;
-  nodesWithScore: NodeWithScore[];
-}
-
-// todo(himself65): Move this to @llamaindex/core/schema
-/**
- * A BaseSynthesizer is used to generate a response from a query and a list of nodes.
- */
-export interface BaseSynthesizer extends PromptMixin {
-  synthesize(
-    query: SynthesizeQuery,
-    stream: true,
-  ): Promise<AsyncIterable<EngineResponse>>;
-  synthesize(query: SynthesizeQuery, stream?: false): Promise<EngineResponse>;
-}
-
-export interface ResponseBuilderQuery {
-  query: QueryType;
-  textChunks: string[];
-  prevResponse?: string | undefined;
-}
-
-/**
- * A ResponseBuilder is used in a response synthesizer to generate a response from multiple response chunks.
- */
-export interface ResponseBuilder extends PromptMixin {
-  /**
-   * Get the response from a query and a list of text chunks.
-   */
-  getResponse(
-    query: ResponseBuilderQuery,
-    stream: true,
-  ): Promise<AsyncIterable<string>>;
-  getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>;
-}
diff --git a/packages/llamaindex/src/synthesizers/utils.ts b/packages/llamaindex/src/synthesizers/utils.ts
deleted file mode 100644
index fee771dd8dd32b08a2cc8319bf07cf081b16a4f9..0000000000000000000000000000000000000000
--- a/packages/llamaindex/src/synthesizers/utils.ts
+++ /dev/null
@@ -1,72 +0,0 @@
-import type { MessageContentDetail } from "@llamaindex/core/llms";
-import type { BasePromptTemplate } from "@llamaindex/core/prompts";
-import {
-  ImageNode,
-  MetadataMode,
-  ModalityType,
-  splitNodesByType,
-  type BaseNode,
-} from "@llamaindex/core/schema";
-import { imageToDataUrl } from "../internal/utils.js";
-
-export async function createMessageContent(
-  prompt: BasePromptTemplate,
-  nodes: BaseNode[],
-  extraParams: Record<string, string | undefined> = {},
-  metadataMode: MetadataMode = MetadataMode.NONE,
-): Promise<MessageContentDetail[]> {
-  const content: MessageContentDetail[] = [];
-  const nodeMap = splitNodesByType(nodes);
-  for (const type in nodeMap) {
-    // for each retrieved modality type, create message content
-    const nodes = nodeMap[type as ModalityType];
-    if (nodes) {
-      content.push(
-        ...(await createContentPerModality(
-          prompt,
-          type as ModalityType,
-          nodes,
-          extraParams,
-          metadataMode,
-        )),
-      );
-    }
-  }
-  return content;
-}
-
-// eslint-disable-next-line max-params
-async function createContentPerModality(
-  prompt: BasePromptTemplate,
-  type: ModalityType,
-  nodes: BaseNode[],
-  extraParams: Record<string, string | undefined>,
-  metadataMode: MetadataMode,
-): Promise<MessageContentDetail[]> {
-  switch (type) {
-    case ModalityType.TEXT:
-      return [
-        {
-          type: "text",
-          text: prompt.format({
-            ...extraParams,
-            context: nodes.map((r) => r.getContent(metadataMode)).join("\n\n"),
-          }),
-        },
-      ];
-    case ModalityType.IMAGE:
-      const images: MessageContentDetail[] = await Promise.all(
-        (nodes as ImageNode[]).map(async (node) => {
-          return {
-            type: "image_url",
-            image_url: {
-              url: await imageToDataUrl(node.image),
-            },
-          } satisfies MessageContentDetail;
-        }),
-      );
-      return images;
-    default:
-      return [];
-  }
-}
diff --git a/packages/llamaindex/src/types.ts b/packages/llamaindex/src/types.ts
index ad36e9b3ea0701b2569845c057876aa688109bd8..4bdb026869e9c322cbf7e62131345506a5f1c550 100644
--- a/packages/llamaindex/src/types.ts
+++ b/packages/llamaindex/src/types.ts
@@ -2,36 +2,6 @@
  * Top level types to avoid circular dependencies
  */
 import type { ToolMetadata } from "@llamaindex/core/llms";
-import type { EngineResponse } from "@llamaindex/core/schema";
-
-/**
- * Parameters for sending a query.
- */
-export interface QueryEngineParamsBase {
-  query: string;
-}
-
-export interface QueryEngineParamsStreaming extends QueryEngineParamsBase {
-  stream: true;
-}
-
-export interface QueryEngineParamsNonStreaming extends QueryEngineParamsBase {
-  stream?: false | null;
-}
-
-/**
- * A query engine is a question answerer that can use one or more steps.
- */
-export interface QueryEngine {
-  /**
-   * Query the query engine and get a response.
-   * @param params
-   */
-  query(
-    params: QueryEngineParamsStreaming,
-  ): Promise<AsyncIterable<EngineResponse>>;
-  query(params: QueryEngineParamsNonStreaming): Promise<EngineResponse>;
-}
 
 /**
  * StructuredOutput is just a combo of the raw output and the parsed output.
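
With `QueryEngineParamsStreaming`/`QueryEngineParamsNonStreaming` gone, the overloads live on `BaseQueryEngine` itself and the trailing boolean selects the return type. A sketch of how the overloads are assumed to resolve:

```ts
import type { BaseQueryEngine } from "llamaindex";

declare const engine: BaseQueryEngine; // any engine from this PR

const single = await engine.query({ query: "hi" }); // Promise<EngineResponse>
const chunks = await engine.query({ query: "hi" }, true); // Promise<AsyncIterable<EngineResponse>>
```
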
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index a2ec1c26847820eb31edb06d57558277b5b3f3d8..8931b8db0794fb2454d0c0a24d662605cfe79ffe 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -167,7 +167,7 @@ importers:
         version: link:../packages/llamaindex
       mongodb:
         specifier: ^6.7.0
-        version: 6.8.0(@aws-sdk/credential-providers@3.650.0(@aws-sdk/client-sso-oidc@3.650.0(@aws-sdk/client-sts@3.650.0)))
+        version: 6.8.0(@aws-sdk/credential-providers@3.650.0)
       pathe:
         specifier: ^1.1.2
         version: 1.1.2
@@ -388,6 +388,9 @@ importers:
       '@types/node':
         specifier: ^22.5.1
         version: 22.5.4
+      magic-bytes.js:
+        specifier: ^1.10.0
+        version: 1.10.0
       zod:
         specifier: ^3.23.8
         version: 3.23.8
@@ -603,7 +606,7 @@ importers:
         version: 2.0.0
       mongodb:
         specifier: ^6.7.0
-        version: 6.8.0(@aws-sdk/credential-providers@3.650.0(@aws-sdk/client-sso-oidc@3.650.0(@aws-sdk/client-sts@3.650.0)))
+        version: 6.8.0(@aws-sdk/credential-providers@3.650.0)
       notion-md-crawler:
         specifier: ^1.0.0
         version: 1.0.0(encoding@0.1.13)
@@ -18297,7 +18300,7 @@ snapshots:
       '@typescript-eslint/parser': 7.2.0(eslint@8.57.0)(typescript@5.6.2)
       eslint: 8.57.0
       eslint-import-resolver-node: 0.3.9
-      eslint-import-resolver-typescript: 3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0)
+      eslint-import-resolver-typescript: 3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1)(eslint@8.57.0)
       eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.0)
       eslint-plugin-jsx-a11y: 6.9.0(eslint@8.57.0)
       eslint-plugin-react: 7.35.0(eslint@8.57.0)
@@ -18345,25 +18348,6 @@ snapshots:
     transitivePeerDependencies:
       - supports-color
 
-  eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0):
-    dependencies:
-      '@nolyfill/is-core-module': 1.0.39
-      debug: 4.3.7
-      enhanced-resolve: 5.17.1
-      eslint: 8.57.0
-      eslint-module-utils: 2.8.2(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0)
-      fast-glob: 3.3.2
-      get-tsconfig: 4.8.0
-      is-bun-module: 1.1.0
-      is-glob: 4.0.3
-    optionalDependencies:
-      eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.0)
-    transitivePeerDependencies:
-      - '@typescript-eslint/parser'
-      - eslint-import-resolver-node
-      - eslint-import-resolver-webpack
-      - supports-color
-
   eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1)(eslint@8.57.0):
     dependencies:
       '@nolyfill/is-core-module': 1.0.39
@@ -18376,24 +18360,13 @@ snapshots:
       is-bun-module: 1.1.0
       is-glob: 4.0.3
     optionalDependencies:
-      eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.0)
+      eslint-plugin-import: 2.29.1(@typescript-eslint/parser@8.5.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0)
     transitivePeerDependencies:
       - '@typescript-eslint/parser'
       - eslint-import-resolver-node
       - eslint-import-resolver-webpack
       - supports-color
 
-  eslint-module-utils@2.8.2(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0):
-    dependencies:
-      debug: 3.2.7
-    optionalDependencies:
-      '@typescript-eslint/parser': 7.2.0(eslint@8.57.0)(typescript@5.6.2)
-      eslint: 8.57.0
-      eslint-import-resolver-node: 0.3.9
-      eslint-import-resolver-typescript: 3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0)
-    transitivePeerDependencies:
-      - supports-color
-
   eslint-module-utils@2.8.2(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1)(eslint@8.57.0))(eslint@8.57.0):
     dependencies:
       debug: 3.2.7
@@ -18415,7 +18388,7 @@ snapshots:
       doctrine: 2.1.0
       eslint: 8.57.0
       eslint-import-resolver-node: 0.3.9
-      eslint-module-utils: 2.8.2(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0)
+      eslint-module-utils: 2.8.2(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1)(eslint@8.57.0))(eslint@8.57.0)
       hasown: 2.0.2
       is-core-module: 2.15.1
       is-glob: 4.0.3
@@ -21144,7 +21117,7 @@ snapshots:
     optionalDependencies:
       '@aws-sdk/credential-providers': 3.650.0(@aws-sdk/client-sso-oidc@3.650.0(@aws-sdk/client-sts@3.650.0))
 
-  mongodb@6.8.0(@aws-sdk/credential-providers@3.650.0(@aws-sdk/client-sso-oidc@3.650.0(@aws-sdk/client-sts@3.650.0))):
+  mongodb@6.8.0(@aws-sdk/credential-providers@3.650.0):
     dependencies:
       '@mongodb-js/saslprep': 1.1.7
       bson: 6.8.0