Skip to content
Snippets Groups Projects
Commit e2a0876d authored by Marcus Schiesser's avatar Marcus Schiesser Committed by Marcus Schiesser
Browse files

fix: Remove chunk size limit for prompt helper (use LLM default)

parent a75d899a
No related branches found
No related tags found
No related merge requests found
---
"@llamaindex/core": patch
---
Remove chunk size limit for prompt helper (use LLM default)
import { type Tokenizer, tokenizers } from "@llamaindex/env"; import { type Tokenizer, tokenizers } from "@llamaindex/env";
import { import {
DEFAULT_CHUNK_OVERLAP_RATIO, DEFAULT_CHUNK_OVERLAP_RATIO,
DEFAULT_CHUNK_SIZE,
DEFAULT_CONTEXT_WINDOW, DEFAULT_CONTEXT_WINDOW,
DEFAULT_NUM_OUTPUTS, DEFAULT_NUM_OUTPUTS,
DEFAULT_PADDING, DEFAULT_PADDING,
...@@ -171,7 +170,7 @@ export class PromptHelper { ...@@ -171,7 +170,7 @@ export class PromptHelper {
) { ) {
const { const {
chunkOverlapRatio = DEFAULT_CHUNK_OVERLAP_RATIO, chunkOverlapRatio = DEFAULT_CHUNK_OVERLAP_RATIO,
chunkSizeLimit = DEFAULT_CHUNK_SIZE, chunkSizeLimit = undefined,
tokenizer = Settings.tokenizer, tokenizer = Settings.tokenizer,
separator = " ", separator = " ",
} = options ?? {}; } = options ?? {};
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment