diff --git a/server/utils/AiProviders/perplexity/models.js b/server/utils/AiProviders/perplexity/models.js
index f20ce70ca3de2b1b178d6318d8e5b5118817e6d8..64bd2cffaee8e1187dba1e4f83dbb26950fc1d72 100644
--- a/server/utils/AiProviders/perplexity/models.js
+++ b/server/utils/AiProviders/perplexity/models.js
@@ -64,6 +64,11 @@ const MODELS = {
     name: "llama-3.1-70b-instruct",
     maxLength: 131072,
   },
+  "llama-3.1-sonar-huge-128k-chat": {
+    id: "llama-3.1-sonar-huge-128k-chat",
+    name: "llama-3.1-sonar-huge-128k-chat",
+    maxLength: 127072,
+  },
 };
 
 module.exports.MODELS = MODELS;
diff --git a/server/utils/AiProviders/perplexity/scripts/chat_models.txt b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
index 8e98f0c2009160cef96cd5746002f7d165a11892..ec9081efe91aaee3be20ed0401705de8fc2e97ea 100644
--- a/server/utils/AiProviders/perplexity/scripts/chat_models.txt
+++ b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
@@ -12,4 +12,5 @@
 | `llama-3-70b-instruct`  | 70B             | 8,192          | Chat Completion |
 | `mixtral-8x7b-instruct` | 8x7B            | 16,384         | Chat Completion |
 | `llama-3.1-8b-instruct`   | 8B              | 131,072        | Chat Completion |
-| `llama-3.1-70b-instruct`  | 70B             | 131,072        | Chat Completion |
\ No newline at end of file
+| `llama-3.1-70b-instruct`  | 70B             | 131,072        | Chat Completion |
+| `llama-3.1-sonar-huge-128k-chat`   | 405B             | 127,072        | Chat Completion |
\ No newline at end of file
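
For context, a minimal sketch of how the `MODELS` map extended above could be consumed, assuming a hypothetical `contextWindowFor` helper (the actual consumer, presumably the Perplexity provider's `index.js`, is not part of this diff):

```js
// Sketch only: contextWindowFor is a hypothetical helper, not part of this change.
// The require path assumes this snippet sits next to
// server/utils/AiProviders/perplexity/models.js.
const { MODELS } = require("./models");

// Resolve the max context length for a Perplexity model id, with a conservative
// fallback when the id is not present in the MODELS map.
function contextWindowFor(modelId, fallback = 4096) {
  return MODELS[modelId]?.maxLength ?? fallback;
}

console.log(contextWindowFor("llama-3.1-sonar-huge-128k-chat")); // 127072
console.log(contextWindowFor("some-unknown-model")); // 4096
```

Note that the new entry's `maxLength` is 127,072 rather than a literal 128k, matching the context length Perplexity documents for its `llama-3.1-sonar-*-128k` models.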