Skip to content
Snippets Groups Projects
Commit 5c5f4c1c authored by Marcus Schiesser's avatar Marcus Schiesser
Browse files

Revert "feat: support calculate llama 2 tokens (#676)"

This reverts commit 041acd11.
parent 949d3302
No related branches found
No related tags found
No related merge requests found
---
"llamaindex": patch
"@llamaindex/edge": patch
---
feat: support calculate Llama2 model tokens size
......@@ -25,7 +25,6 @@
"chromadb": "~1.7.3",
"cohere-ai": "^7.7.5",
"js-tiktoken": "^1.0.10",
"llama-tokenizer-js": "^1.2.1",
"lodash": "^4.17.21",
"magic-bytes.js": "^1.10.0",
"mammoth": "^1.6.0",
......@@ -42,8 +41,8 @@
"rake-modified": "^1.0.8",
"replicate": "^0.25.2",
"string-strip-html": "^13.4.6",
"wikipedia": "^2.1.2",
"wink-nlp": "^1.14.3"
"wink-nlp": "^1.14.3",
"wikipedia": "^2.1.2"
},
"devDependencies": {
"@swc/cli": "^0.3.9",
......
......@@ -8,7 +8,6 @@ import {
type StreamCallbackResponse,
} from "../callbacks/CallbackManager.js";
import llamaTokenizer from "llama-tokenizer-js";
import type { ChatCompletionMessageParam } from "openai/resources/index.js";
import type { LLMOptions } from "portkey-ai";
import { Tokenizers, globalsHelper } from "../GlobalsHelper.js";
......@@ -416,8 +415,7 @@ export class LlamaDeuce extends BaseLLM {
}
tokens(messages: ChatMessage[]): number {
return llamaTokenizer.encode(this.mapMessagesToPrompt(messages).prompt)
.length;
throw new Error("Method not implemented.");
}
get metadata() {
......
......@@ -24,7 +24,6 @@
"chromadb": "~1.7.3",
"cohere-ai": "^7.7.5",
"js-tiktoken": "^1.0.10",
"llama-tokenizer-js": "^1.2.1",
"lodash": "^4.17.21",
"magic-bytes.js": "^1.10.0",
"mammoth": "^1.6.0",
......@@ -41,8 +40,8 @@
"rake-modified": "^1.0.8",
"replicate": "^0.25.2",
"string-strip-html": "^13.4.6",
"wikipedia": "^2.1.2",
"wink-nlp": "^1.14.3"
"wink-nlp": "^1.14.3",
"wikipedia": "^2.1.2"
},
"engines": {
"node": ">=18.0.0"
......
......@@ -228,9 +228,6 @@ importers:
js-tiktoken:
specifier: ^1.0.10
version: 1.0.10
llama-tokenizer-js:
specifier: ^1.2.1
version: 1.2.1
lodash:
specifier: ^4.17.21
version: 4.17.21
......@@ -376,9 +373,6 @@ importers:
js-tiktoken:
specifier: ^1.0.10
version: 1.0.10
llama-tokenizer-js:
specifier: ^1.2.1
version: 1.2.1
lodash:
specifier: ^4.17.21
version: 4.17.21
......@@ -9809,10 +9803,6 @@ packages:
wrap-ansi: 9.0.0
dev: true
 
/llama-tokenizer-js@1.2.1:
resolution: {integrity: sha512-SEVVc++cXR0D0Wv30AzMVWzPCAKM701vZYU31h5lCTIn4k5cfZpJ070YDcb2nPq2Ts3xgu44L19wIrq1z/XjXQ==}
dev: false
/load-yaml-file@0.2.0:
resolution: {integrity: sha512-OfCBkGEw4nN6JLtgRidPX6QxjBQGQf72q3si2uvqyFEMbycSFFHwAZeXx6cJgFM9wmLrf9zBwCP3Ivqa+LLZPw==}
engines: {node: '>=6'}
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment