From 92da23e9638726b9b45c2cf5c048b6f6137df1a5 Mon Sep 17 00:00:00 2001
From: Timothy Carambat <rambat1010@gmail.com>
Date: Thu, 4 Jan 2024 15:47:00 -0800
Subject: [PATCH] Handle special token in TikToken (#528)

* Handle special token in TikToken
resolves #525

* remove duplicate method
add clarification comment on implementation
---
 server/utils/helpers/chat/index.js |  2 +-
 server/utils/helpers/tiktoken.js   | 14 +++++---------
 2 files changed, 6 insertions(+), 10 deletions(-)
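
The root cause here is js-tiktoken's default handling of special tokens: encode() treats every special token as disallowed and throws if one appears in the input string. Below is a minimal sketch of that failure mode and of the workaround this patch adopts, assuming (it is not stated in the diff) that issue #525 was triggered by user text containing a literal special token such as <|endoftext|>; the snippet is illustrative and not part of the patch.

```js
const { getEncoding } = require("js-tiktoken");

const encoder = getEncoding("cl100k_base");
const input = "User text that happens to contain <|endoftext|> verbatim.";

// Default behavior: disallowedSpecial is "all", so encode() throws as soon as
// it sees a special token anywhere in the input.
try {
  encoder.encode(input);
} catch (e) {
  console.error("encode() rejected the input:", e.message);
}

// With [] as disallowedSpecial, the special token is tokenized as ordinary
// text, so encode() no longer throws on arbitrary user input.
const tokens = encoder.encode(input, undefined, []);
console.log(tokens.length);
```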

diff --git a/server/utils/helpers/chat/index.js b/server/utils/helpers/chat/index.js
index ed7eab90f..b969201eb 100644
--- a/server/utils/helpers/chat/index.js
+++ b/server/utils/helpers/chat/index.js
@@ -300,7 +300,7 @@ function cannonball({
   // if the delta is the token difference between where our prompt is in size
   // and where we ideally need to land.
   const delta = initialInputSize - targetTokenSize;
-  const tokenChunks = tokenManager.tokensFromString(input);
+  const tokenChunks = tokenManager.countFromString(input);
   const middleIdx = Math.floor(tokenChunks.length / 2);
 
   // middle truncate the text going left and right of midpoint
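
For context, the comments in this hunk describe the technique cannonball relies on: compute how many tokens over budget the prompt is (the delta) and drop roughly that many tokens around the midpoint of the token stream. The following is a rough sketch of middle truncation under those assumptions, using js-tiktoken directly; middleTruncate is a hypothetical helper for illustration only, and the repository's cannonball adds bookkeeping this sketch omits.

```js
const { getEncoding } = require("js-tiktoken");

// Hypothetical helper: drop `delta` tokens around the midpoint so the text
// keeps its beginning and end while landing at the target token size.
function middleTruncate(input, targetTokenSize) {
  const encoder = getEncoding("cl100k_base");
  const tokens = encoder.encode(input, undefined, []); // token array; special tokens kept as text
  const delta = tokens.length - targetTokenSize;
  if (delta <= 0) return input;

  const middleIdx = Math.floor(tokens.length / 2);
  const kept = [
    ...tokens.slice(0, middleIdx - Math.ceil(delta / 2)),
    ...tokens.slice(middleIdx + Math.floor(delta / 2)),
  ];
  return encoder.decode(kept);
}

console.log(middleTruncate("a long prompt that exceeds the window", 1024));
```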
diff --git a/server/utils/helpers/tiktoken.js b/server/utils/helpers/tiktoken.js
index ad1cdd444..134755492 100644
--- a/server/utils/helpers/tiktoken.js
+++ b/server/utils/helpers/tiktoken.js
@@ -3,12 +3,11 @@ const { getEncodingNameForModel, getEncoding } = require("js-tiktoken");
 class TokenManager {
   constructor(model = "gpt-3.5-turbo") {
     this.model = model;
-    this.encoderName = this.getEncodingFromModel(model);
+    this.encoderName = this.#getEncodingFromModel(model);
     this.encoder = getEncoding(this.encoderName);
-    this.buffer = 50;
   }
 
-  getEncodingFromModel(model) {
+  #getEncodingFromModel(model) {
     try {
       return getEncodingNameForModel(model);
     } catch {
@@ -16,18 +15,15 @@ class TokenManager {
     }
   }
 
-  tokensFromString(input = "") {
-    const tokens = this.encoder.encode(input);
-    return tokens;
-  }
-
   bytesFromTokens(tokens = []) {
     const bytes = this.encoder.decode(tokens);
     return bytes;
   }
 
+  // Pass in an empty array of disallowedSpecials to handle all tokens as text and to be tokenized.
+  // https://github.com/openai/tiktoken/blob/9e79899bc248d5313c7dd73562b5e211d728723d/tiktoken/core.py#L91C20-L91C38
   countFromString(input = "") {
-    const tokens = this.encoder.encode(input);
+    const tokens = this.encoder.encode(input, undefined, []);
     return tokens.length;
  }
 
 
-- 
GitLab
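
A usage sketch against the patched TokenManager follows; the require path and the module export of the class are assumptions, as neither appears in the diff above.

```js
const { TokenManager } = require("./server/utils/helpers/tiktoken"); // path assumed

const manager = new TokenManager("gpt-3.5-turbo");
const risky = "A stored document that literally embeds <|endoftext|> in its body.";

// After the patch, counting tokens on such input returns a number instead of
// throwing, because encode() is called with [] as disallowedSpecial.
console.log(manager.countFromString(risky));
```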