From 041acd11fe913593516ba4782684dc5f5b7b6f5f Mon Sep 17 00:00:00 2001
From: Alex Yang <himself65@outlook.com>
Date: Fri, 29 Mar 2024 20:12:26 -0500
Subject: [PATCH] feat: support calculating Llama 2 tokens (#676)

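LlamaDeuce#tokens() previously threw "Method not implemented.". This
patch adds the llama-tokenizer-js dependency and uses it to count
tokens locally: the chat messages are mapped to the Llama 2 prompt
format and the resulting string is encoded with the Llama 2 BPE
tokenizer, so the count reflects the prompt the model actually
receives.

A minimal usage sketch (the constructor defaults and message values
below are illustrative assumptions, not part of this patch):

    import { LlamaDeuce } from "llamaindex";

    const llm = new LlamaDeuce();
    // Counts Llama 2 tokens for the fully mapped prompt, including
    // any template markers added by mapMessagesToPrompt().
    const count = llm.tokens([
      { role: "user", content: "How many tokens is this?" },
    ]);
    console.log(count);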
---
 .changeset/khaki-books-type.md |  6 ++++++
 packages/core/package.json     |  5 +++--
 packages/core/src/llm/LLM.ts   |  4 +++-
 packages/edge/package.json     |  5 +++--
 pnpm-lock.yaml                 | 10 ++++++++++
 5 files changed, 25 insertions(+), 5 deletions(-)
 create mode 100644 .changeset/khaki-books-type.md

diff --git a/.changeset/khaki-books-type.md b/.changeset/khaki-books-type.md
new file mode 100644
index 000000000..4954897a9
--- /dev/null
+++ b/.changeset/khaki-books-type.md
@@ -0,0 +1,6 @@
+---
+"llamaindex": patch
+"@llamaindex/edge": patch
+---
+
+feat: support calculating Llama 2 token counts
diff --git a/packages/core/package.json b/packages/core/package.json
index 388fc1dd9..bcdfe925c 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -25,6 +25,7 @@
     "chromadb": "~1.7.3",
     "cohere-ai": "^7.7.5",
     "js-tiktoken": "^1.0.10",
+    "llama-tokenizer-js": "^1.2.1",
     "lodash": "^4.17.21",
     "magic-bytes.js": "^1.10.0",
     "mammoth": "^1.6.0",
@@ -41,8 +42,8 @@
     "rake-modified": "^1.0.8",
     "replicate": "^0.25.2",
     "string-strip-html": "^13.4.6",
-    "wink-nlp": "^1.14.3",
-    "wikipedia": "^2.1.2"
+    "wikipedia": "^2.1.2",
+    "wink-nlp": "^1.14.3"
   },
   "devDependencies": {
     "@swc/cli": "^0.3.9",
diff --git a/packages/core/src/llm/LLM.ts b/packages/core/src/llm/LLM.ts
index aad062a5f..5ebc059ef 100644
--- a/packages/core/src/llm/LLM.ts
+++ b/packages/core/src/llm/LLM.ts
@@ -8,6 +8,7 @@ import type {
   StreamCallbackResponse,
 } from "../callbacks/CallbackManager.js";
 
+import llamaTokenizer from "llama-tokenizer-js";
 import type { ChatCompletionMessageParam } from "openai/resources/index.js";
 import type { LLMOptions } from "portkey-ai";
 import { Tokenizers, globalsHelper } from "../GlobalsHelper.js";
@@ -419,7 +420,8 @@ export class LlamaDeuce extends BaseLLM {
   }
 
   tokens(messages: ChatMessage[]): number {
-    throw new Error("Method not implemented.");
+    return llamaTokenizer.encode(this.mapMessagesToPrompt(messages).prompt)
+      .length;
   }
 
   get metadata() {
diff --git a/packages/edge/package.json b/packages/edge/package.json
index 8edf4ba89..afba58f29 100644
--- a/packages/edge/package.json
+++ b/packages/edge/package.json
@@ -24,6 +24,7 @@
     "chromadb": "~1.7.3",
     "cohere-ai": "^7.7.5",
     "js-tiktoken": "^1.0.10",
+    "llama-tokenizer-js": "^1.2.1",
     "lodash": "^4.17.21",
     "magic-bytes.js": "^1.10.0",
     "mammoth": "^1.6.0",
@@ -40,8 +41,8 @@
     "rake-modified": "^1.0.8",
     "replicate": "^0.25.2",
     "string-strip-html": "^13.4.6",
-    "wink-nlp": "^1.14.3",
-    "wikipedia": "^2.1.2"
+    "wikipedia": "^2.1.2",
+    "wink-nlp": "^1.14.3"
   },
   "engines": {
     "node": ">=18.0.0"
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 8dd88f9d1..f6b454397 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -228,6 +228,9 @@ importers:
       js-tiktoken:
         specifier: ^1.0.10
         version: 1.0.10
+      llama-tokenizer-js:
+        specifier: ^1.2.1
+        version: 1.2.1
       lodash:
         specifier: ^4.17.21
         version: 4.17.21
@@ -373,6 +376,9 @@ importers:
       js-tiktoken:
         specifier: ^1.0.10
         version: 1.0.10
+      llama-tokenizer-js:
+        specifier: ^1.2.1
+        version: 1.2.1
       lodash:
         specifier: ^4.17.21
         version: 4.17.21
@@ -9985,6 +9991,10 @@ packages:
       wrap-ansi: 9.0.0
     dev: true
 
+  /llama-tokenizer-js@1.2.1:
+    resolution: {integrity: sha512-SEVVc++cXR0D0Wv30AzMVWzPCAKM701vZYU31h5lCTIn4k5cfZpJ070YDcb2nPq2Ts3xgu44L19wIrq1z/XjXQ==}
+    dev: false
+
   /load-yaml-file@0.2.0:
     resolution: {integrity: sha512-OfCBkGEw4nN6JLtgRidPX6QxjBQGQf72q3si2uvqyFEMbycSFFHwAZeXx6cJgFM9wmLrf9zBwCP3Ivqa+LLZPw==}
     engines: {node: '>=6'}
-- 
GitLab