Unverified commit 8c5a30db authored by Timothy Carambat, committed by GitHub

Support dynamic context length - VoyageAI (#1489)

parent a256db13
@@ -394,16 +394,17 @@ export default function GeneralLLMPreference()
           >
             <div className="flex gap-x-4 items-center">
               <img
-                src={selectedLLMObject.logo}
-                alt={`${selectedLLMObject.name} logo`}
+                src={selectedLLMObject?.logo || AnythingLLMIcon}
+                alt={`${selectedLLMObject?.name} logo`}
                 className="w-10 h-10 rounded-md"
               />
               <div className="flex flex-col text-left">
                 <div className="text-sm font-semibold text-white">
-                  {selectedLLMObject.name}
+                  {selectedLLMObject?.name || "None selected"}
                 </div>
                 <div className="mt-1 text-xs text-[#D2D5DB]">
-                  {selectedLLMObject.description}
+                  {selectedLLMObject?.description ||
+                    "You need to select an LLM"}
                 </div>
               </div>
             </div>
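The hunk above guards the provider-selection card against the state where no LLM has been chosen yet. A minimal standalone sketch of the same fallback pattern outside JSX (the fallback icon constant and the object shape are assumptions taken from this diff, not part of the commit):

// Sketch only: mirrors the fallback logic in the JSX hunk above.
const FALLBACK_ICON = "anything-llm-icon.png"; // stand-in for the imported AnythingLLMIcon

function describeSelection(selectedLLMObject) {
  return {
    logo: selectedLLMObject?.logo || FALLBACK_ICON,
    name: selectedLLMObject?.name || "None selected",
    description: selectedLLMObject?.description || "You need to select an LLM",
  };
}

// With nothing selected, every field falls back instead of throwing:
console.log(describeSelection(undefined));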
@@ -15,7 +15,22 @@ class VoyageAiEmbedder {
     // Limit of how many strings we can process in a single pass to stay within resource or network limits
     this.batchSize = 128; // Voyage AI's limit per request is 128 https://docs.voyageai.com/docs/rate-limits#use-larger-batches
-    this.embeddingMaxChunkLength = 4000; // https://docs.voyageai.com/docs/embeddings - assume a token is roughly 4 letters with some padding
+    this.embeddingMaxChunkLength = this.#getMaxEmbeddingLength();
   }

+  // https://docs.voyageai.com/docs/embeddings
+  #getMaxEmbeddingLength() {
+    switch (this.model) {
+      case "voyage-large-2-instruct":
+      case "voyage-law-2":
+      case "voyage-code-2":
+      case "voyage-large-2":
+        return 16_000;
+      case "voyage-2":
+        return 4_000;
+      default:
+        return 4_000;
+    }
+  }
+
   async embedTextInput(textInput) {
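Since the max chunk length is now resolved per model instead of being hard-coded to 4,000 characters, callers that split documents before embedding automatically pick up the larger window. A rough, self-contained sketch of how the two limits interact; the helper names below do not exist in the diff above, and the values are only the ones visible in it:

// Illustrative only: combines the per-model chunk limit with Voyage AI's
// 128-strings-per-request cap the way a caller of this embedder might.
function chunkText(text, maxLength) {
  const chunks = [];
  for (let i = 0; i < text.length; i += maxLength) {
    chunks.push(text.slice(i, i + maxLength));
  }
  return chunks;
}

function toBatches(items, batchSize) {
  const batches = [];
  for (let i = 0; i < items.length; i += batchSize) {
    batches.push(items.slice(i, i + batchSize));
  }
  return batches;
}

const embeddingMaxChunkLength = 16_000; // e.g. voyage-large-2, per the switch above
const batchSize = 128; // Voyage AI's per-request limit

const longDocument = "lorem ipsum ".repeat(50_000); // ~600k characters
const chunks = chunkText(longDocument, embeddingMaxChunkLength); // 38 chunks
const requests = toBatches(chunks, batchSize); // 1 request of <= 128 strings
console.log(`${chunks.length} chunks across ${requests.length} request(s)`);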