diff --git a/server/utils/AiProviders/perplexity/models.js b/server/utils/AiProviders/perplexity/models.js
index 1990c4e70e9278a732a87ac3ebd98eaee77406e6..e35068d4c1a103d2d19c8a004058ec68b92e8e8e 100644
--- a/server/utils/AiProviders/perplexity/models.js
+++ b/server/utils/AiProviders/perplexity/models.js
@@ -1,23 +1,23 @@
 const MODELS = {
-  "sonar-small-chat": {
-    id: "sonar-small-chat",
-    name: "sonar-small-chat",
-    maxLength: 16384,
-  },
-  "sonar-small-online": {
-    id: "sonar-small-online",
-    name: "sonar-small-online",
-    maxLength: 12000,
-  },
-  "sonar-medium-chat": {
-    id: "sonar-medium-chat",
-    name: "sonar-medium-chat",
-    maxLength: 16384,
-  },
-  "sonar-medium-online": {
-    id: "sonar-medium-online",
-    name: "sonar-medium-online",
-    maxLength: 12000,
+  "llama-3-sonar-small-32k-online": {
+    id: "llama-3-sonar-small-32k-online",
+    name: "llama-3-sonar-small-32k-online",
+    maxLength: 28000,
+  },
+  "llama-3-sonar-small-32k-chat": {
+    id: "llama-3-sonar-small-32k-chat",
+    name: "llama-3-sonar-small-32k-chat",
+    maxLength: 32768,
+  },
+  "llama-3-sonar-large-32k-online": {
+    id: "llama-3-sonar-large-32k-online",
+    name: "llama-3-sonar-large-32k-online",
+    maxLength: 28000,
+  },
+  "llama-3-sonar-large-32k-chat": {
+    id: "llama-3-sonar-large-32k-chat",
+    name: "llama-3-sonar-large-32k-chat",
+    maxLength: 32768,
   },
   "llama-3-8b-instruct": {
     id: "llama-3-8b-instruct",
@@ -29,26 +29,11 @@ const MODELS = {
     name: "llama-3-70b-instruct",
     maxLength: 8192,
   },
-  "codellama-70b-instruct": {
-    id: "codellama-70b-instruct",
-    name: "codellama-70b-instruct",
-    maxLength: 16384,
-  },
-  "mistral-7b-instruct": {
-    id: "mistral-7b-instruct",
-    name: "mistral-7b-instruct",
-    maxLength: 16384,
-  },
   "mixtral-8x7b-instruct": {
     id: "mixtral-8x7b-instruct",
     name: "mixtral-8x7b-instruct",
     maxLength: 16384,
   },
-  "mixtral-8x22b-instruct": {
-    id: "mixtral-8x22b-instruct",
-    name: "mixtral-8x22b-instruct",
-    maxLength: 16384,
-  },
 };
 
 module.exports.MODELS = MODELS;
diff --git a/server/utils/AiProviders/perplexity/scripts/chat_models.txt b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
index 2a620525b929f76032c4678262f018fa662d8c70..e02af255135f3d563e979efec6e89024c96c3d78 100644
--- a/server/utils/AiProviders/perplexity/scripts/chat_models.txt
+++ b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
@@ -1,12 +1,9 @@
-| Model                 | Parameter Count | Context Length | Model Type      |
-| :-------------------- | :-------------- | :------------- | :-------------- |
-| `sonar-small-chat`    | 7B              | 16384          | Chat Completion |
-| `sonar-small-online`  | 7B              | 12000          | Chat Completion |
-| `sonar-medium-chat`   | 8x7B            | 16384          | Chat Completion |
-| `sonar-medium-online` | 8x7B            | 12000          | Chat Completion |
-| `llama-3-8b-instruct`     | 8B              | 8192           | Chat Completion |
-| `llama-3-70b-instruct`    | 70B             | 8192           | Chat Completion |
-| `codellama-70b-instruct`  | 70B             | 16384          | Chat Completion |
-| `mistral-7b-instruct` [1] | 7B              | 16384          | Chat Completion |
-| `mixtral-8x7b-instruct`   | 8x7B            | 16384          | Chat Completion |
-| `mixtral-8x22b-instruct`  | 8x22B           | 16384          | Chat Completion |
\ No newline at end of file
+| Model                              | Parameter Count | Context Length | Model Type      |
+| :--------------------------------- | :-------------- | :------------- | :-------------- |
+| `llama-3-sonar-small-32k-online`\* | 8B              | 28,000         | Chat Completion |
+| `llama-3-sonar-small-32k-chat`     | 8B              | 32,768         | Chat Completion |
+| `llama-3-sonar-large-32k-online`\* | 70B             | 28,000         | Chat Completion |
+| `llama-3-sonar-large-32k-chat`     | 70B             | 32,768         | Chat Completion |
+| `llama-3-8b-instruct`              | 8B              | 8,192          | Chat Completion |
+| `llama-3-70b-instruct`             | 70B             | 8,192          | Chat Completion |
+| `mixtral-8x7b-instruct`            | 8x7B            | 16,384         | Chat Completion |
\ No newline at end of file
diff --git a/server/utils/AiProviders/perplexity/scripts/parse.mjs b/server/utils/AiProviders/perplexity/scripts/parse.mjs
index fcbbaf73138b4c8249a22bdcbf623bbe88f2f9dc..6dec22d96b100d8a9a7e00671cdc4ea5547998f1 100644
--- a/server/utils/AiProviders/perplexity/scripts/parse.mjs
+++ b/server/utils/AiProviders/perplexity/scripts/parse.mjs
@@ -8,7 +8,7 @@
 // copy outputs into the export in ../models.js
 
 // Update the date below if you run this again because Perplexity added new models.
-// Last Collected: Apr 25, 2024
+// Last Collected: Jul 19, 2024
 
 import fs from "fs";
 
@@ -23,7 +23,7 @@ function parseChatModels() {
       .slice(1, -1)
       .map((text) => text.trim());
-    model = model.replace(/`|\s*\[\d+\]\s*/g, "");
-    const maxLength = Number(contextLength.replace(/\s*\[\d+\]\s*/g, ""));
+    model = model.replace(/`|\\\*|\s*\[\d+\]\s*/g, ""); // strip backticks, [n] refs, and the docs' `\*` footnote marker so ids match the API
+    const maxLength = Number(contextLength.replace(/[^\d]/g, ""));
     if (model && maxLength) {
       models[model] = {
         id: model,