diff --git a/packages/core/package.json b/packages/core/package.json
index 54fcef067681fe11ea3d31480a3ca284ab1c0314..d1bd24cd53f503e75a9576c3d39553b9dd903cea 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -129,6 +129,20 @@
         "types": "./dist/prompts/index.d.ts",
         "default": "./dist/prompts/index.js"
       }
+    },
+    "./indices": {
+      "require": {
+        "types": "./dist/indices/index.d.cts",
+        "default": "./dist/indices/index.cjs"
+      },
+      "import": {
+        "types": "./dist/indices/index.d.ts",
+        "default": "./dist/indices/index.js"
+      },
+      "default": {
+        "types": "./dist/indices/index.d.ts",
+        "default": "./dist/indices/index.js"
+      }
     }
   },
   "files": [
diff --git a/packages/core/src/indices/index.ts b/packages/core/src/indices/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..5545c65aa8c84f1dcc8167de6e2277e4711feed7
--- /dev/null
+++ b/packages/core/src/indices/index.ts
@@ -0,0 +1,5 @@
+export {
+  PromptHelper,
+  getBiggestPrompt,
+  type PromptHelperOptions,
+} from "./prompt-helper";
diff --git a/packages/llamaindex/src/PromptHelper.ts b/packages/core/src/indices/prompt-helper.ts
similarity index 79%
rename from packages/llamaindex/src/PromptHelper.ts
rename to packages/core/src/indices/prompt-helper.ts
index c4618ecde56f3fe0baee0cdcea1b48be28a42c2d..473885efd9f4006bf64dd4756b8f6f4debbb32d4 100644
--- a/packages/llamaindex/src/PromptHelper.ts
+++ b/packages/core/src/indices/prompt-helper.ts
@@ -1,17 +1,17 @@
+import { type Tokenizer, tokenizers } from "@llamaindex/env";
 import {
   DEFAULT_CHUNK_OVERLAP_RATIO,
   DEFAULT_CONTEXT_WINDOW,
   DEFAULT_NUM_OUTPUTS,
   DEFAULT_PADDING,
-} from "@llamaindex/core/global";
-import { SentenceSplitter } from "@llamaindex/core/node-parser";
-import type { PromptTemplate } from "@llamaindex/core/prompts";
-import { type Tokenizer, tokenizers } from "@llamaindex/env";
+} from "../global";
+import { SentenceSplitter } from "../node-parser";
+import type { PromptTemplate } from "../prompts";
 
 /**
  * Get the empty prompt text given a prompt.
  */
-export function getEmptyPromptTxt(prompt: PromptTemplate) {
+function getEmptyPromptTxt(prompt: PromptTemplate) {
   return prompt.format({
     ...Object.fromEntries(
       [...prompt.templateVars.keys()].map((key) => [key, ""]),
@@ -31,6 +31,15 @@ export function getBiggestPrompt(prompts: PromptTemplate[]) {
   return prompts[maxEmptyPromptIndex];
 }
 
+export type PromptHelperOptions = {
+  contextWindow?: number;
+  numOutput?: number;
+  chunkOverlapRatio?: number;
+  chunkSizeLimit?: number;
+  tokenizer?: Tokenizer;
+  separator?: string;
+};
+
 /**
  * A collection of helper functions for working with prompts.
  */
@@ -42,15 +51,15 @@ export class PromptHelper {
   tokenizer: Tokenizer;
   separator = " ";
 
-  // eslint-disable-next-line max-params
-  constructor(
-    contextWindow = DEFAULT_CONTEXT_WINDOW,
-    numOutput = DEFAULT_NUM_OUTPUTS,
-    chunkOverlapRatio = DEFAULT_CHUNK_OVERLAP_RATIO,
-    chunkSizeLimit?: number,
-    tokenizer?: Tokenizer,
-    separator = " ",
-  ) {
+  constructor(options: PromptHelperOptions = {}) {
+    const {
+      contextWindow = DEFAULT_CONTEXT_WINDOW,
+      numOutput = DEFAULT_NUM_OUTPUTS,
+      chunkOverlapRatio = DEFAULT_CHUNK_OVERLAP_RATIO,
+      chunkSizeLimit,
+      tokenizer,
+      separator = " ",
+    } = options;
     this.contextWindow = contextWindow;
     this.numOutput = numOutput;
     this.chunkOverlapRatio = chunkOverlapRatio;
@@ -79,7 +88,7 @@ export class PromptHelper {
     prompt: PromptTemplate,
     numChunks = 1,
     padding = 5,
-  ) {
+  ): number {
     const availableContextSize = this.getAvailableContextSize(prompt);
 
     const result = Math.floor(availableContextSize / numChunks) - padding;
@@ -104,7 +113,12 @@ export class PromptHelper {
       throw new Error("Got 0 as available chunk size");
     }
     const chunkOverlap = this.chunkOverlapRatio * chunkSize;
-    return new SentenceSplitter({ chunkSize, chunkOverlap });
+    return new SentenceSplitter({
+      chunkSize,
+      chunkOverlap,
+      separator: this.separator,
+      tokenizer: this.tokenizer,
+    });
   }
 
   /**
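
Review note: replacing six positional constructor parameters with a single options
object is a breaking change for positional callers, though omitted fields still
fall back to the same DEFAULT_* constants. A before/after sketch with illustrative
values:

    // Before (positional, order-sensitive):
    // const helper = new PromptHelper(4096, 256, 0.1);

    // After (named options; any omitted field takes its default):
    const helper = new PromptHelper({
      contextWindow: 4096,
      numOutput: 256,
      chunkOverlapRatio: 0.1,
    });

Passing separator and tokenizer through to the SentenceSplitter is also a behavior
change: the splitter now counts tokens with the same tokenizer the helper uses for
its context-window math, instead of the splitter's own default.
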
diff --git a/packages/llamaindex/src/ServiceContext.ts b/packages/llamaindex/src/ServiceContext.ts
index 8cb24c94c54e5caf50adf16bf3d4473a56fba944..c6c1885f295dc3af95b3e4046b5a1b7319c4a4a9 100644
--- a/packages/llamaindex/src/ServiceContext.ts
+++ b/packages/llamaindex/src/ServiceContext.ts
@@ -1,10 +1,10 @@
 import type { BaseEmbedding } from "@llamaindex/core/embeddings";
+import { PromptHelper } from "@llamaindex/core/indices";
 import type { LLM } from "@llamaindex/core/llms";
 import {
   type NodeParser,
   SentenceSplitter,
 } from "@llamaindex/core/node-parser";
-import { PromptHelper } from "./PromptHelper.js";
 import { OpenAIEmbedding } from "./embeddings/OpenAIEmbedding.js";
 import { OpenAI } from "./llm/openai.js";
 
diff --git a/packages/llamaindex/src/Settings.ts b/packages/llamaindex/src/Settings.ts
index 0fb712d6c48f0c834353eca6924f7ddee7034a79..778a6fee42c4a1a9c1f41fa60801c12c69401bb6 100644
--- a/packages/llamaindex/src/Settings.ts
+++ b/packages/llamaindex/src/Settings.ts
@@ -4,7 +4,7 @@ import {
 } from "@llamaindex/core/global";
 import { OpenAI } from "./llm/openai.js";
 
-import { PromptHelper } from "./PromptHelper.js";
+import { PromptHelper } from "@llamaindex/core/indices";
 
 import type { BaseEmbedding } from "@llamaindex/core/embeddings";
 import type { LLM } from "@llamaindex/core/llms";
diff --git a/packages/llamaindex/src/index.edge.ts b/packages/llamaindex/src/index.edge.ts
index 86908724560970e815f1c96866ba27bdc614bcaf..b23086cd550c07cf653ab39defdfb7860b5d66a7 100644
--- a/packages/llamaindex/src/index.edge.ts
+++ b/packages/llamaindex/src/index.edge.ts
@@ -30,6 +30,7 @@ export type {
   LLMToolCallEvent,
   LLMToolResultEvent,
 } from "@llamaindex/core/global";
+export * from "@llamaindex/core/indices";
 export * from "@llamaindex/core/llms";
 export * from "@llamaindex/core/prompts";
 export * from "@llamaindex/core/schema";
@@ -62,7 +63,6 @@ export * from "./nodeParsers/index.js";
 export * from "./objects/index.js";
 export * from "./OutputParser.js";
 export * from "./postprocessors/index.js";
-export * from "./PromptHelper.js";
 export * from "./QuestionGenerator.js";
 export * from "./Retriever.js";
 export * from "./selectors/index.js";
diff --git a/packages/llamaindex/src/synthesizers/builders.ts b/packages/llamaindex/src/synthesizers/builders.ts
index f5abc23350448c8942e562a3e6bd8d2258efba18..b8709bd28002114e94e667937e532b6e75b55bf8 100644
--- a/packages/llamaindex/src/synthesizers/builders.ts
+++ b/packages/llamaindex/src/synthesizers/builders.ts
@@ -1,3 +1,4 @@
+import { getBiggestPrompt, type PromptHelper } from "@llamaindex/core/indices";
 import type { LLM } from "@llamaindex/core/llms";
 import {
   PromptMixin,
@@ -12,7 +13,6 @@ import {
 } from "@llamaindex/core/prompts";
 import type { QueryType } from "@llamaindex/core/query-engine";
 import { extractText, streamConverter } from "@llamaindex/core/utils";
-import { getBiggestPrompt, type PromptHelper } from "../PromptHelper.js";
 import type { ServiceContext } from "../ServiceContext.js";
 import {
   llmFromSettingsOrContext,
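
Review note: builders.ts keeps using getBiggestPrompt to size chunks against
whichever template produces the longest prompt when formatted with empty
variables. A minimal sketch of that pattern, with illustrative templates
(PromptTemplate's options-object constructor is assumed from the core prompts
package):

    import { getBiggestPrompt } from "@llamaindex/core/indices";
    import { PromptTemplate } from "@llamaindex/core/prompts";

    const qaPrompt = new PromptTemplate({
      templateVars: ["context", "query"],
      template: "Context: {context}\nAnswer: {query}",
    });
    const refinePrompt = new PromptTemplate({
      templateVars: ["existingAnswer", "context"],
      template: "Original answer: {existingAnswer}\nRefine it using: {context}",
    });

    // getBiggestPrompt formats each template with empty variables and returns
    // the one with the longest empty rendering, so downstream chunk sizing is
    // computed against the worst case.
    const biggest = getBiggestPrompt([qaPrompt, refinePrompt]);
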