From e2a0876ddd4d207d3df43fd65e3868585aaaf6f5 Mon Sep 17 00:00:00 2001
From: Marcus Schiesser <mail@marcusschiesser.de>
Date: Thu, 24 Oct 2024 15:39:57 +0700
Subject: [PATCH] fix: Remove chunk size limit for prompt helper (use LLM default)

---
 .changeset/late-apricots-yell.md           | 5 +++++
 packages/core/src/indices/prompt-helper.ts | 3 +--
 2 files changed, 6 insertions(+), 2 deletions(-)
 create mode 100644 .changeset/late-apricots-yell.md

diff --git a/.changeset/late-apricots-yell.md b/.changeset/late-apricots-yell.md
new file mode 100644
index 000000000..96c869458
--- /dev/null
+++ b/.changeset/late-apricots-yell.md
@@ -0,0 +1,5 @@
+---
+"@llamaindex/core": patch
+---
+
+Remove chunk size limit for prompt helper (use LLM default)
diff --git a/packages/core/src/indices/prompt-helper.ts b/packages/core/src/indices/prompt-helper.ts
index 960ffe31f..477c5037c 100644
--- a/packages/core/src/indices/prompt-helper.ts
+++ b/packages/core/src/indices/prompt-helper.ts
@@ -1,7 +1,6 @@
 import { type Tokenizer, tokenizers } from "@llamaindex/env";
 import {
   DEFAULT_CHUNK_OVERLAP_RATIO,
-  DEFAULT_CHUNK_SIZE,
   DEFAULT_CONTEXT_WINDOW,
   DEFAULT_NUM_OUTPUTS,
   DEFAULT_PADDING,
@@ -171,7 +170,7 @@ export class PromptHelper {
   ) {
     const {
       chunkOverlapRatio = DEFAULT_CHUNK_OVERLAP_RATIO,
-      chunkSizeLimit = DEFAULT_CHUNK_SIZE,
+      chunkSizeLimit = undefined,
       tokenizer = Settings.tokenizer,
       separator = " ",
     } = options ?? {};
--
GitLab
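
Note (not part of the patch): the effect of this change is that a `PromptHelper` constructed without an explicit `chunkSizeLimit` no longer falls back to `DEFAULT_CHUNK_SIZE`; the limit stays `undefined`, so chunk sizing is driven by the LLM's own defaults. A minimal caller-side sketch follows, assuming the constructor takes a single options object (consistent with the `options ?? {}` destructuring in the diff); the import path and the example values are assumptions, not taken from this patch.

```ts
// Sketch only: the option names chunkOverlapRatio and chunkSizeLimit are
// visible in the diff; the import path is assumed here.
import { PromptHelper } from "@llamaindex/core/indices";

// Before this patch, omitting chunkSizeLimit silently applied DEFAULT_CHUNK_SIZE.
// After this patch, omitting it leaves the limit undefined, so the available
// chunk size is derived from the LLM/context-window defaults instead of a fixed cap.
const helper = new PromptHelper({
  chunkOverlapRatio: 0.1, // example value; the diff only shows the DEFAULT_CHUNK_OVERLAP_RATIO fallback
});

// Callers that relied on the old behavior can restore a hard cap explicitly:
const cappedHelper = new PromptHelper({
  chunkSizeLimit: 1024, // hypothetical value; set it to the previous DEFAULT_CHUNK_SIZE if needed
});
```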