diff --git a/server/utils/AiProviders/togetherAi/index.js b/server/utils/AiProviders/togetherAi/index.js
index 8c9f8831cf0c5987bf4890d368c9aaafb291ff90..b255a5a9d366c12f8060d9e17e3788824ff45456 100644
--- a/server/utils/AiProviders/togetherAi/index.js
+++ b/server/utils/AiProviders/togetherAi/index.js
@@ -40,6 +40,26 @@ class TogetherAiLLM {
     );
   }
 
+  #generateContent({ userPrompt, attachments = [] }) {
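+    // No attachments: return the prompt as a plain string. With attachments,
+    // return an OpenAI-style content array of text and image_url parts
+    // (each attachment.contentString is expected to be a base64 data URL).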
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image_url",
+        image_url: {
+          url: attachment.contentString,
+        },
+      });
+    }
+    return content.flat();
+  }
+
   allModelInformation() {
     return togetherAiModels();
   }
@@ -70,12 +87,20 @@ class TogetherAiLLM {
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [],
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };
-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }
 
   async getChatCompletion(messages = null, { temperature = 0.7 }) {
diff --git a/server/utils/AiProviders/togetherAi/models.js b/server/utils/AiProviders/togetherAi/models.js
index d2bdc65bf7173f8cd1709e57689e26632701eaf1..a6f536fb32cbbc64bde93a426ce3ee84f01c205b 100644
--- a/server/utils/AiProviders/togetherAi/models.js
+++ b/server/utils/AiProviders/togetherAi/models.js
@@ -5,12 +5,24 @@ const MODELS = {
     name: "01-ai Yi Chat (34B)",
     maxLength: 4096,
   },
+  "allenai/OLMo-7B-Instruct": {
+    id: "allenai/OLMo-7B-Instruct",
+    organization: "AllenAI",
+    name: "OLMo Instruct (7B)",
+    maxLength: 2048,
+  },
   "Austism/chronos-hermes-13b": {
     id: "Austism/chronos-hermes-13b",
     organization: "Austism",
     name: "Chronos Hermes (13B)",
     maxLength: 2048,
   },
+  "carson/ml318br": {
+    id: "carson/ml318br",
+    organization: "carson",
+    name: "carson ml318br",
+    maxLength: 8192,
+  },
   "cognitivecomputations/dolphin-2.5-mixtral-8x7b": {
     id: "cognitivecomputations/dolphin-2.5-mixtral-8x7b",
     organization: "cognitivecomputations",
@@ -19,40 +31,58 @@ const MODELS = {
   },
   "databricks/dbrx-instruct": {
     id: "databricks/dbrx-instruct",
-    organization: "databricks",
+    organization: "Databricks",
     name: "DBRX Instruct",
     maxLength: 32768,
   },
-  "deepseek-ai/deepseek-coder-33b-instruct": {
-    id: "deepseek-ai/deepseek-coder-33b-instruct",
-    organization: "DeepSeek",
-    name: "Deepseek Coder Instruct (33B)",
-    maxLength: 16384,
-  },
   "deepseek-ai/deepseek-llm-67b-chat": {
     id: "deepseek-ai/deepseek-llm-67b-chat",
     organization: "DeepSeek",
     name: "DeepSeek LLM Chat (67B)",
     maxLength: 4096,
   },
+  "deepseek-ai/deepseek-coder-33b-instruct": {
+    id: "deepseek-ai/deepseek-coder-33b-instruct",
+    organization: "DeepSeek",
+    name: "Deepseek Coder Instruct (33B)",
+    maxLength: 16384,
+  },
   "garage-bAInd/Platypus2-70B-instruct": {
     id: "garage-bAInd/Platypus2-70B-instruct",
     organization: "garage-bAInd",
     name: "Platypus2 Instruct (70B)",
     maxLength: 4096,
   },
+  "google/gemma-2-9b-it": {
+    id: "google/gemma-2-9b-it",
+    organization: "google",
+    name: "Gemma-2 Instruct (9B)",
+    maxLength: 8192,
+  },
   "google/gemma-2b-it": {
     id: "google/gemma-2b-it",
     organization: "Google",
     name: "Gemma Instruct (2B)",
     maxLength: 8192,
   },
+  "google/gemma-2-27b-it": {
+    id: "google/gemma-2-27b-it",
+    organization: "Google",
+    name: "Gemma-2 Instruct (27B)",
+    maxLength: 8192,
+  },
   "google/gemma-7b-it": {
     id: "google/gemma-7b-it",
     organization: "Google",
     name: "Gemma Instruct (7B)",
     maxLength: 8192,
   },
+  "gradientai/Llama-3-70B-Instruct-Gradient-1048k": {
+    id: "gradientai/Llama-3-70B-Instruct-Gradient-1048k",
+    organization: "gradientai",
+    name: "Llama-3 70B Instruct Gradient 1048K",
+    maxLength: 1048576,
+  },
   "Gryphe/MythoMax-L2-13b": {
     id: "Gryphe/MythoMax-L2-13b",
     organization: "Gryphe",
@@ -62,50 +92,104 @@ const MODELS = {
   "Gryphe/MythoMax-L2-13b-Lite": {
     id: "Gryphe/MythoMax-L2-13b-Lite",
     organization: "Gryphe",
-    name: "MythoMax-L2 Lite (13B)",
+    name: "Gryphe MythoMax L2 Lite (13B)",
     maxLength: 4096,
   },
+  "llava-hf/llava-v1.6-mistral-7b-hf": {
+    id: "llava-hf/llava-v1.6-mistral-7b-hf",
+    organization: "Haotian Liu",
+    name: "LLaVa-Next (Mistral-7B)",
+    maxLength: 4096,
+  },
+  "HuggingFaceH4/zephyr-7b-beta": {
+    id: "HuggingFaceH4/zephyr-7b-beta",
+    organization: "HuggingFace",
+    name: "Zephyr-7B-ß",
+    maxLength: 32768,
+  },
+  "togethercomputer/Koala-7B": {
+    id: "togethercomputer/Koala-7B",
+    organization: "LM Sys",
+    name: "Koala (7B)",
+    maxLength: 2048,
+  },
+  "lmsys/vicuna-7b-v1.3": {
+    id: "lmsys/vicuna-7b-v1.3",
+    organization: "LM Sys",
+    name: "Vicuna v1.3 (7B)",
+    maxLength: 2048,
+  },
+  "lmsys/vicuna-13b-v1.5-16k": {
+    id: "lmsys/vicuna-13b-v1.5-16k",
+    organization: "LM Sys",
+    name: "Vicuna v1.5 16K (13B)",
+    maxLength: 16384,
+  },
   "lmsys/vicuna-13b-v1.5": {
     id: "lmsys/vicuna-13b-v1.5",
     organization: "LM Sys",
     name: "Vicuna v1.5 (13B)",
     maxLength: 4096,
   },
+  "lmsys/vicuna-13b-v1.3": {
+    id: "lmsys/vicuna-13b-v1.3",
+    organization: "LM Sys",
+    name: "Vicuna v1.3 (13B)",
+    maxLength: 2048,
+  },
+  "togethercomputer/Koala-13B": {
+    id: "togethercomputer/Koala-13B",
+    organization: "LM Sys",
+    name: "Koala (13B)",
+    maxLength: 2048,
+  },
   "lmsys/vicuna-7b-v1.5": {
     id: "lmsys/vicuna-7b-v1.5",
     organization: "LM Sys",
     name: "Vicuna v1.5 (7B)",
     maxLength: 4096,
   },
-  "codellama/CodeLlama-13b-Instruct-hf": {
-    id: "codellama/CodeLlama-13b-Instruct-hf",
-    organization: "Meta",
-    name: "Code Llama Instruct (13B)",
-    maxLength: 16384,
-  },
   "codellama/CodeLlama-34b-Instruct-hf": {
     id: "codellama/CodeLlama-34b-Instruct-hf",
     organization: "Meta",
     name: "Code Llama Instruct (34B)",
     maxLength: 16384,
   },
-  "codellama/CodeLlama-70b-Instruct-hf": {
-    id: "codellama/CodeLlama-70b-Instruct-hf",
+  "togethercomputer/Llama-3-8b-chat-hf-int4": {
+    id: "togethercomputer/Llama-3-8b-chat-hf-int4",
     organization: "Meta",
-    name: "Code Llama Instruct (70B)",
-    maxLength: 4096,
+    name: "Llama3 8B Chat HF INT4",
+    maxLength: 8192,
   },
-  "codellama/CodeLlama-7b-Instruct-hf": {
-    id: "codellama/CodeLlama-7b-Instruct-hf",
+  "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo": {
+    id: "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo",
     organization: "Meta",
-    name: "Code Llama Instruct (7B)",
-    maxLength: 16384,
+    name: "Llama 3.2 90B Vision Instruct Turbo",
+    maxLength: 131072,
   },
-  "meta-llama/Llama-2-70b-chat-hf": {
-    id: "meta-llama/Llama-2-70b-chat-hf",
+  "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo": {
+    id: "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",
     organization: "Meta",
-    name: "LLaMA-2 Chat (70B)",
-    maxLength: 4096,
+    name: "Llama 3.2 11B Vision Instruct Turbo",
+    maxLength: 131072,
+  },
+  "meta-llama/Llama-3.2-3B-Instruct-Turbo": {
+    id: "meta-llama/Llama-3.2-3B-Instruct-Turbo",
+    organization: "Meta",
+    name: "Meta Llama 3.2 3B Instruct Turbo",
+    maxLength: 131072,
+  },
+  "togethercomputer/Llama-3-8b-chat-hf-int8": {
+    id: "togethercomputer/Llama-3-8b-chat-hf-int8",
+    organization: "Meta",
+    name: "Togethercomputer Llama3 8B Instruct Int8",
+    maxLength: 8192,
+  },
+  "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": {
+    id: "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
+    organization: "Meta",
+    name: "Meta Llama 3.1 70B Instruct Turbo",
+    maxLength: 32768,
   },
   "meta-llama/Llama-2-13b-chat-hf": {
     id: "meta-llama/Llama-2-13b-chat-hf",
@@ -113,59 +197,173 @@ const MODELS = {
     name: "LLaMA-2 Chat (13B)",
     maxLength: 4096,
   },
-  "meta-llama/Llama-2-7b-chat-hf": {
-    id: "meta-llama/Llama-2-7b-chat-hf",
+  "meta-llama/Meta-Llama-3-70B-Instruct-Lite": {
+    id: "meta-llama/Meta-Llama-3-70B-Instruct-Lite",
     organization: "Meta",
-    name: "LLaMA-2 Chat (7B)",
-    maxLength: 4096,
+    name: "Meta Llama 3 70B Instruct Lite",
+    maxLength: 8192,
   },
   "meta-llama/Llama-3-8b-chat-hf": {
     id: "meta-llama/Llama-3-8b-chat-hf",
     organization: "Meta",
-    name: "LLaMA-3 Chat (8B)",
+    name: "Meta Llama 3 8B Instruct Reference",
     maxLength: 8192,
   },
   "meta-llama/Llama-3-70b-chat-hf": {
     id: "meta-llama/Llama-3-70b-chat-hf",
     organization: "Meta",
-    name: "LLaMA-3 Chat (70B)",
+    name: "Meta Llama 3 70B Instruct Reference",
     maxLength: 8192,
   },
   "meta-llama/Meta-Llama-3-8B-Instruct-Turbo": {
     id: "meta-llama/Meta-Llama-3-8B-Instruct-Turbo",
     organization: "Meta",
-    name: "LLaMA-3 Chat (8B) Turbo",
+    name: "Meta Llama 3 8B Instruct Turbo",
     maxLength: 8192,
   },
+  "meta-llama/Meta-Llama-3-8B-Instruct-Lite": {
+    id: "meta-llama/Meta-Llama-3-8B-Instruct-Lite",
+    organization: "Meta",
+    name: "Meta Llama 3 8B Instruct Lite",
+    maxLength: 8192,
+  },
+  "meta-llama/Meta-Llama-3.1-405B-Instruct-Lite-Pro": {
+    id: "meta-llama/Meta-Llama-3.1-405B-Instruct-Lite-Pro",
+    organization: "Meta",
+    name: "Meta Llama 3.1 405B Instruct Turbo",
+    maxLength: 4096,
+  },
+  "meta-llama/Llama-2-7b-chat-hf": {
+    id: "meta-llama/Llama-2-7b-chat-hf",
+    organization: "Meta",
+    name: "LLaMA-2 Chat (7B)",
+    maxLength: 4096,
+  },
+  "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": {
+    id: "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo",
+    organization: "Meta",
+    name: "Meta Llama 3.1 405B Instruct Turbo",
+    maxLength: 130815,
+  },
+  "meta-llama/Llama-Vision-Free": {
+    id: "meta-llama/Llama-Vision-Free",
+    organization: "Meta",
+    name: "(Free) Llama 3.2 11B Vision Instruct Turbo",
+    maxLength: 131072,
+  },
   "meta-llama/Meta-Llama-3-70B-Instruct-Turbo": {
     id: "meta-llama/Meta-Llama-3-70B-Instruct-Turbo",
     organization: "Meta",
-    name: "LLaMA-3 Chat (70B) Turbo",
+    name: "Meta Llama 3 70B Instruct Turbo",
     maxLength: 8192,
   },
   "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": {
     id: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
     organization: "Meta",
-    name: "Llama 3.1 8B Instruct Turbo",
+    name: "Meta Llama 3.1 8B Instruct Turbo",
+    maxLength: 32768,
+  },
+  "togethercomputer/CodeLlama-7b-Instruct": {
+    id: "togethercomputer/CodeLlama-7b-Instruct",
+    organization: "Meta",
+    name: "Code Llama Instruct (7B)",
+    maxLength: 16384,
+  },
+  "togethercomputer/CodeLlama-34b-Instruct": {
+    id: "togethercomputer/CodeLlama-34b-Instruct",
+    organization: "Meta",
+    name: "Code Llama Instruct (34B)",
+    maxLength: 16384,
+  },
+  "codellama/CodeLlama-13b-Instruct-hf": {
+    id: "codellama/CodeLlama-13b-Instruct-hf",
+    organization: "Meta",
+    name: "Code Llama Instruct (13B)",
+    maxLength: 16384,
+  },
+  "togethercomputer/CodeLlama-13b-Instruct": {
+    id: "togethercomputer/CodeLlama-13b-Instruct",
+    organization: "Meta",
+    name: "Code Llama Instruct (13B)",
+    maxLength: 16384,
+  },
+  "togethercomputer/llama-2-13b-chat": {
+    id: "togethercomputer/llama-2-13b-chat",
+    organization: "Meta",
+    name: "LLaMA-2 Chat (13B)",
+    maxLength: 4096,
+  },
+  "togethercomputer/llama-2-7b-chat": {
+    id: "togethercomputer/llama-2-7b-chat",
+    organization: "Meta",
+    name: "LLaMA-2 Chat (7B)",
+    maxLength: 4096,
+  },
+  "meta-llama/Meta-Llama-3-8B-Instruct": {
+    id: "meta-llama/Meta-Llama-3-8B-Instruct",
+    organization: "Meta",
+    name: "Meta Llama 3 8B Instruct",
     maxLength: 8192,
   },
-  "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": {
-    id: "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
+  "meta-llama/Meta-Llama-3-70B-Instruct": {
+    id: "meta-llama/Meta-Llama-3-70B-Instruct",
     organization: "Meta",
-    name: "Llama 3.1 70B Instruct Turbo",
+    name: "Meta Llama 3 70B Instruct",
     maxLength: 8192,
   },
-  "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": {
-    id: "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo",
+  "codellama/CodeLlama-70b-Instruct-hf": {
+    id: "codellama/CodeLlama-70b-Instruct-hf",
+    organization: "Meta",
+    name: "Code Llama Instruct (70B)",
+    maxLength: 4096,
+  },
+  "togethercomputer/llama-2-70b-chat": {
+    id: "togethercomputer/llama-2-70b-chat",
     organization: "Meta",
-    name: "Llama 3.1 405B Instruct Turbo",
+    name: "LLaMA-2 Chat (70B)",
     maxLength: 4096,
   },
+  "codellama/CodeLlama-7b-Instruct-hf": {
+    id: "codellama/CodeLlama-7b-Instruct-hf",
+    organization: "Meta",
+    name: "Code Llama Instruct (7B)",
+    maxLength: 16384,
+  },
+  "meta-llama/Llama-2-70b-chat-hf": {
+    id: "meta-llama/Llama-2-70b-chat-hf",
+    organization: "Meta",
+    name: "LLaMA-2 Chat (70B)",
+    maxLength: 4096,
+  },
+  "meta-llama/Meta-Llama-3.1-8B-Instruct-Reference": {
+    id: "meta-llama/Meta-Llama-3.1-8B-Instruct-Reference",
+    organization: "Meta",
+    name: "Meta Llama 3.1 8B Instruct",
+    maxLength: 16384,
+  },
+  "albert/meta-llama-3-1-70b-instruct-turbo": {
+    id: "albert/meta-llama-3-1-70b-instruct-turbo",
+    organization: "Meta",
+    name: "Meta Llama 3.1 70B Instruct Turbo",
+    maxLength: 131072,
+  },
+  "meta-llama/Meta-Llama-3.1-70B-Instruct-Reference": {
+    id: "meta-llama/Meta-Llama-3.1-70B-Instruct-Reference",
+    organization: "Meta",
+    name: "Meta Llama 3.1 70B Instruct",
+    maxLength: 8192,
+  },
+  "microsoft/WizardLM-2-8x22B": {
+    id: "microsoft/WizardLM-2-8x22B",
+    organization: "microsoft",
+    name: "WizardLM-2 (8x22B)",
+    maxLength: 65536,
+  },
   "mistralai/Mistral-7B-Instruct-v0.1": {
     id: "mistralai/Mistral-7B-Instruct-v0.1",
     organization: "mistralai",
     name: "Mistral (7B) Instruct",
-    maxLength: 8192,
+    maxLength: 4096,
   },
   "mistralai/Mistral-7B-Instruct-v0.2": {
     id: "mistralai/Mistral-7B-Instruct-v0.2",
@@ -182,37 +380,43 @@ const MODELS = {
   "mistralai/Mixtral-8x7B-Instruct-v0.1": {
     id: "mistralai/Mixtral-8x7B-Instruct-v0.1",
     organization: "mistralai",
-    name: "Mixtral-8x7B Instruct (46.7B)",
+    name: "Mixtral-8x7B Instruct v0.1",
     maxLength: 32768,
   },
   "mistralai/Mixtral-8x22B-Instruct-v0.1": {
     id: "mistralai/Mixtral-8x22B-Instruct-v0.1",
     organization: "mistralai",
-    name: "Mixtral-8x22B Instruct (141B)",
+    name: "Mixtral-8x22B Instruct v0.1",
     maxLength: 65536,
   },
-  "NousResearch/Nous-Capybara-7B-V1p9": {
-    id: "NousResearch/Nous-Capybara-7B-V1p9",
-    organization: "NousResearch",
-    name: "Nous Capybara v1.9 (7B)",
-    maxLength: 8192,
-  },
-  "NousResearch/Nous-Hermes-2-Mistral-7B-DPO": {
-    id: "NousResearch/Nous-Hermes-2-Mistral-7B-DPO",
-    organization: "NousResearch",
-    name: "Nous Hermes 2 - Mistral DPO (7B)",
-    maxLength: 32768,
-  },
   "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": {
     id: "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
     organization: "NousResearch",
-    name: "Nous Hermes 2 - Mixtral 8x7B-DPO (46.7B)",
+    name: "Nous Hermes 2 - Mixtral 8x7B-DPO",
     maxLength: 32768,
   },
+  "NousResearch/Nous-Hermes-Llama2-70b": {
+    id: "NousResearch/Nous-Hermes-Llama2-70b",
+    organization: "NousResearch",
+    name: "Nous Hermes LLaMA-2 (70B)",
+    maxLength: 4096,
+  },
   "NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT": {
     id: "NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT",
     organization: "NousResearch",
-    name: "Nous Hermes 2 - Mixtral 8x7B-SFT (46.7B)",
+    name: "Nous Hermes 2 - Mixtral 8x7B-SFT",
+    maxLength: 32768,
+  },
+  "NousResearch/Nous-Hermes-Llama2-13b": {
+    id: "NousResearch/Nous-Hermes-Llama2-13b",
+    organization: "NousResearch",
+    name: "Nous Hermes Llama-2 (13B)",
+    maxLength: 4096,
+  },
+  "NousResearch/Nous-Hermes-2-Mistral-7B-DPO": {
+    id: "NousResearch/Nous-Hermes-2-Mistral-7B-DPO",
+    organization: "NousResearch",
+    name: "Nous Hermes 2 - Mistral DPO (7B)",
     maxLength: 32768,
   },
   "NousResearch/Nous-Hermes-llama-2-7b": {
@@ -221,22 +425,22 @@ const MODELS = {
     name: "Nous Hermes LLaMA-2 (7B)",
     maxLength: 4096,
   },
-  "NousResearch/Nous-Hermes-Llama2-13b": {
-    id: "NousResearch/Nous-Hermes-Llama2-13b",
+  "NousResearch/Nous-Capybara-7B-V1p9": {
+    id: "NousResearch/Nous-Capybara-7B-V1p9",
     organization: "NousResearch",
-    name: "Nous Hermes Llama-2 (13B)",
-    maxLength: 4096,
+    name: "Nous Capybara v1.9 (7B)",
+    maxLength: 8192,
   },
-  "NousResearch/Nous-Hermes-2-Yi-34B": {
-    id: "NousResearch/Nous-Hermes-2-Yi-34B",
+  "NousResearch/Hermes-2-Theta-Llama-3-70B": {
+    id: "NousResearch/Hermes-2-Theta-Llama-3-70B",
     organization: "NousResearch",
-    name: "Nous Hermes-2 Yi (34B)",
-    maxLength: 4096,
+    name: "Hermes 2 Theta Llama-3 70B",
+    maxLength: 8192,
   },
   "openchat/openchat-3.5-1210": {
     id: "openchat/openchat-3.5-1210",
     organization: "OpenChat",
-    name: "OpenChat 3.5 (7B)",
+    name: "OpenChat 3.5",
     maxLength: 8192,
   },
   "Open-Orca/Mistral-7B-OpenOrca": {
@@ -245,28 +449,46 @@ const MODELS = {
     name: "OpenOrca Mistral (7B) 8K",
     maxLength: 8192,
   },
-  "Qwen/Qwen1.5-0.5B-Chat": {
-    id: "Qwen/Qwen1.5-0.5B-Chat",
+  "Qwen/Qwen2-72B-Instruct": {
+    id: "Qwen/Qwen2-72B-Instruct",
     organization: "Qwen",
-    name: "Qwen 1.5 Chat (0.5B)",
+    name: "Qwen 2 Instruct (72B)",
     maxLength: 32768,
   },
-  "Qwen/Qwen1.5-1.8B-Chat": {
-    id: "Qwen/Qwen1.5-1.8B-Chat",
+  "Qwen/Qwen2.5-72B-Instruct-Turbo": {
+    id: "Qwen/Qwen2.5-72B-Instruct-Turbo",
     organization: "Qwen",
-    name: "Qwen 1.5 Chat (1.8B)",
+    name: "Qwen2.5 72B Instruct Turbo",
     maxLength: 32768,
   },
-  "Qwen/Qwen1.5-4B-Chat": {
-    id: "Qwen/Qwen1.5-4B-Chat",
+  "Qwen/Qwen2.5-7B-Instruct-Turbo": {
+    id: "Qwen/Qwen2.5-7B-Instruct-Turbo",
     organization: "Qwen",
-    name: "Qwen 1.5 Chat (4B)",
+    name: "Qwen2.5 7B Instruct Turbo",
     maxLength: 32768,
   },
-  "Qwen/Qwen1.5-7B-Chat": {
-    id: "Qwen/Qwen1.5-7B-Chat",
+  "Qwen/Qwen1.5-110B-Chat": {
+    id: "Qwen/Qwen1.5-110B-Chat",
     organization: "Qwen",
-    name: "Qwen 1.5 Chat (7B)",
+    name: "Qwen 1.5 Chat (110B)",
+    maxLength: 32768,
+  },
+  "Qwen/Qwen1.5-72B-Chat": {
+    id: "Qwen/Qwen1.5-72B-Chat",
+    organization: "Qwen",
+    name: "Qwen 1.5 Chat (72B)",
+    maxLength: 32768,
+  },
+  "Qwen/Qwen2-1.5B-Instruct": {
+    id: "Qwen/Qwen2-1.5B-Instruct",
+    organization: "Qwen",
+    name: "Qwen 2 Instruct (1.5B)",
+    maxLength: 32768,
+  },
+  "Qwen/Qwen2-7B-Instruct": {
+    id: "Qwen/Qwen2-7B-Instruct",
+    organization: "Qwen",
+    name: "Qwen 2 Instruct (7B)",
     maxLength: 32768,
   },
   "Qwen/Qwen1.5-14B-Chat": {
@@ -275,28 +497,34 @@ const MODELS = {
     name: "Qwen 1.5 Chat (14B)",
     maxLength: 32768,
   },
+  "Qwen/Qwen1.5-1.8B-Chat": {
+    id: "Qwen/Qwen1.5-1.8B-Chat",
+    organization: "Qwen",
+    name: "Qwen 1.5 Chat (1.8B)",
+    maxLength: 32768,
+  },
   "Qwen/Qwen1.5-32B-Chat": {
     id: "Qwen/Qwen1.5-32B-Chat",
     organization: "Qwen",
     name: "Qwen 1.5 Chat (32B)",
     maxLength: 32768,
   },
-  "Qwen/Qwen1.5-72B-Chat": {
-    id: "Qwen/Qwen1.5-72B-Chat",
+  "Qwen/Qwen1.5-7B-Chat": {
+    id: "Qwen/Qwen1.5-7B-Chat",
     organization: "Qwen",
-    name: "Qwen 1.5 Chat (72B)",
+    name: "Qwen 1.5 Chat (7B)",
     maxLength: 32768,
   },
-  "Qwen/Qwen1.5-110B-Chat": {
-    id: "Qwen/Qwen1.5-110B-Chat",
+  "Qwen/Qwen1.5-0.5B-Chat": {
+    id: "Qwen/Qwen1.5-0.5B-Chat",
     organization: "Qwen",
-    name: "Qwen 1.5 Chat (110B)",
+    name: "Qwen 1.5 Chat (0.5B)",
     maxLength: 32768,
   },
-  "Qwen/Qwen2-72B-Instruct": {
-    id: "Qwen/Qwen2-72B-Instruct",
+  "Qwen/Qwen1.5-4B-Chat": {
+    id: "Qwen/Qwen1.5-4B-Chat",
     organization: "Qwen",
-    name: "Qwen 2 Instruct (72B)",
+    name: "Qwen 1.5 Chat (4B)",
     maxLength: 32768,
   },
   "snorkelai/Snorkel-Mistral-PairRM-DPO": {
@@ -319,39 +547,45 @@ const MODELS = {
   },
   "teknium/OpenHermes-2-Mistral-7B": {
     id: "teknium/OpenHermes-2-Mistral-7B",
-    organization: "Teknium",
+    organization: "teknium",
     name: "OpenHermes-2-Mistral (7B)",
     maxLength: 8192,
   },
   "teknium/OpenHermes-2p5-Mistral-7B": {
     id: "teknium/OpenHermes-2p5-Mistral-7B",
-    organization: "Teknium",
+    organization: "teknium",
     name: "OpenHermes-2.5-Mistral (7B)",
     maxLength: 8192,
   },
-  "togethercomputer/Llama-2-7B-32K-Instruct": {
-    id: "togethercomputer/Llama-2-7B-32K-Instruct",
-    organization: "Together",
-    name: "LLaMA-2-7B-32K-Instruct (7B)",
-    maxLength: 32768,
+  "test/test11": {
+    id: "test/test11",
+    organization: "test",
+    name: "Test 11",
+    maxLength: 4096,
   },
-  "togethercomputer/RedPajama-INCITE-Chat-3B-v1": {
-    id: "togethercomputer/RedPajama-INCITE-Chat-3B-v1",
-    organization: "Together",
-    name: "RedPajama-INCITE Chat (3B)",
+  "togethercomputer/guanaco-65b": {
+    id: "togethercomputer/guanaco-65b",
+    organization: "Tim Dettmers",
+    name: "Guanaco (65B)",
     maxLength: 2048,
   },
-  "togethercomputer/RedPajama-INCITE-7B-Chat": {
-    id: "togethercomputer/RedPajama-INCITE-7B-Chat",
-    organization: "Together",
-    name: "RedPajama-INCITE Chat (7B)",
+  "togethercomputer/guanaco-13b": {
+    id: "togethercomputer/guanaco-13b",
+    organization: "Tim Dettmers",
+    name: "Guanaco (13B)",
     maxLength: 2048,
   },
-  "togethercomputer/StripedHyena-Nous-7B": {
-    id: "togethercomputer/StripedHyena-Nous-7B",
-    organization: "Together",
-    name: "StripedHyena Nous (7B)",
-    maxLength: 32768,
+  "togethercomputer/guanaco-33b": {
+    id: "togethercomputer/guanaco-33b",
+    organization: "Tim Dettmers",
+    name: "Guanaco (33B)",
+    maxLength: 2048,
+  },
+  "togethercomputer/guanaco-7b": {
+    id: "togethercomputer/guanaco-7b",
+    organization: "Tim Dettmers",
+    name: "Guanaco (7B)",
+    maxLength: 2048,
   },
   "Undi95/ReMM-SLERP-L2-13B": {
     id: "Undi95/ReMM-SLERP-L2-13B",
@@ -365,18 +599,24 @@ const MODELS = {
     name: "Toppy M (7B)",
     maxLength: 4096,
   },
-  "WizardLM/WizardLM-13B-V1.2": {
-    id: "WizardLM/WizardLM-13B-V1.2",
-    organization: "WizardLM",
-    name: "WizardLM v1.2 (13B)",
-    maxLength: 4096,
-  },
   "upstage/SOLAR-10.7B-Instruct-v1.0": {
     id: "upstage/SOLAR-10.7B-Instruct-v1.0",
     organization: "upstage",
     name: "Upstage SOLAR Instruct v1 (11B)",
     maxLength: 4096,
   },
+  "togethercomputer/SOLAR-10.7B-Instruct-v1.0-int4": {
+    id: "togethercomputer/SOLAR-10.7B-Instruct-v1.0-int4",
+    organization: "upstage",
+    name: "Upstage SOLAR Instruct v1 (11B)-Int4",
+    maxLength: 4096,
+  },
+  "WizardLM/WizardLM-13B-V1.2": {
+    id: "WizardLM/WizardLM-13B-V1.2",
+    organization: "WizardLM",
+    name: "WizardLM v1.2 (13B)",
+    maxLength: 4096,
+  },
 };
 
 module.exports.MODELS = MODELS;
diff --git a/server/utils/AiProviders/togetherAi/scripts/chat_models.txt b/server/utils/AiProviders/togetherAi/scripts/chat_models.txt
index 5d244d065d6fcc69a6efde6a7a6f81052b307804..f071d1eb8917dcef1da787e2015c9f9cd03d30df 100644
--- a/server/utils/AiProviders/togetherAi/scripts/chat_models.txt
+++ b/server/utils/AiProviders/togetherAi/scripts/chat_models.txt
@@ -1,65 +1,105 @@
-| Organization          | Model Name                               | Model String for API                           | Context length |
-| :-------------------- | :--------------------------------------- | :--------------------------------------------- | :------------- |
-| 01.AI                 | 01-ai Yi Chat (34B)                      | zero-one-ai/Yi-34B-Chat                        | 4096           |
-| Austism               | Chronos Hermes (13B)                     | Austism/chronos-hermes-13b                     | 2048           |
-| cognitivecomputations | Dolphin 2.5 Mixtral 8x7b                 | cognitivecomputations/dolphin-2.5-mixtral-8x7b | 32768          |
-| databricks            | DBRX Instruct                            | databricks/dbrx-instruct                       | 32768          |
-| DeepSeek              | Deepseek Coder Instruct (33B)            | deepseek-ai/deepseek-coder-33b-instruct        | 16384          |
-| DeepSeek              | DeepSeek LLM Chat (67B)                  | deepseek-ai/deepseek-llm-67b-chat              | 4096           |
-| garage-bAInd          | Platypus2 Instruct (70B)                 | garage-bAInd/Platypus2-70B-instruct            | 4096           |
-| Google                | Gemma Instruct (2B)                      | google/gemma-2b-it                             | 8192           |
-| Google                | Gemma Instruct (7B)                      | google/gemma-7b-it                             | 8192           |
-| Gryphe                | MythoMax-L2 (13B)                        | Gryphe/MythoMax-L2-13b                         | 4096           |
-| Gryphe                | MythoMax-L2 Lite (13B)                   | Gryphe/MythoMax-L2-13b-Lite                    | 4096           |
-| LM Sys                | Vicuna v1.5 (13B)                        | lmsys/vicuna-13b-v1.5                          | 4096           |
-| LM Sys                | Vicuna v1.5 (7B)                         | lmsys/vicuna-7b-v1.5                           | 4096           |
-| Meta                  | Code Llama Instruct (13B)                | codellama/CodeLlama-13b-Instruct-hf            | 16384          |
-| Meta                  | Code Llama Instruct (34B)                | codellama/CodeLlama-34b-Instruct-hf            | 16384          |
-| Meta                  | Code Llama Instruct (70B)                | codellama/CodeLlama-70b-Instruct-hf            | 4096           |
-| Meta                  | Code Llama Instruct (7B)                 | codellama/CodeLlama-7b-Instruct-hf             | 16384          |
-| Meta                  | LLaMA-2 Chat (70B)                       | meta-llama/Llama-2-70b-chat-hf                 | 4096           |
-| Meta                  | LLaMA-2 Chat (13B)                       | meta-llama/Llama-2-13b-chat-hf                 | 4096           |
-| Meta                  | LLaMA-2 Chat (7B)                        | meta-llama/Llama-2-7b-chat-hf                  | 4096           |
-| Meta                  | LLaMA-3 Chat (8B)                        | meta-llama/Llama-3-8b-chat-hf                  | 8192           |
-| Meta                  | LLaMA-3 Chat (70B)                       | meta-llama/Llama-3-70b-chat-hf                 | 8192           |
-| Meta                  | LLaMA-3 Chat (8B) Turbo                  | meta-llama/Meta-Llama-3-8B-Instruct-Turbo      | 8192           |
-| Meta                  | LLaMA-3 Chat (70B) Turbo                 | meta-llama/Meta-Llama-3-70B-Instruct-Turbo     | 8192           |
-| Meta                  | Llama 3.1 8B Instruct Turbo              | meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo    | 8192           |
-| Meta                  | Llama 3.1 70B Instruct Turbo             | meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo   | 8192           |
-| Meta                  | Llama 3.1 405B Instruct Turbo            | meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo  | 4096           |
-| mistralai             | Mistral (7B) Instruct                    | mistralai/Mistral-7B-Instruct-v0.1             | 8192           |
-| mistralai             | Mistral (7B) Instruct v0.2               | mistralai/Mistral-7B-Instruct-v0.2             | 32768          |
-| mistralai             | Mistral (7B) Instruct v0.3               | mistralai/Mistral-7B-Instruct-v0.3             | 32768          |
-| mistralai             | Mixtral-8x7B Instruct (46.7B)            | mistralai/Mixtral-8x7B-Instruct-v0.1           | 32768          |
-| mistralai             | Mixtral-8x22B Instruct (141B)            | mistralai/Mixtral-8x22B-Instruct-v0.1          | 65536          |
-| NousResearch          | Nous Capybara v1.9 (7B)                  | NousResearch/Nous-Capybara-7B-V1p9             | 8192           |
-| NousResearch          | Nous Hermes 2 - Mistral DPO (7B)         | NousResearch/Nous-Hermes-2-Mistral-7B-DPO      | 32768          |
-| NousResearch          | Nous Hermes 2 - Mixtral 8x7B-DPO (46.7B) | NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO    | 32768          |
-| NousResearch          | Nous Hermes 2 - Mixtral 8x7B-SFT (46.7B) | NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT    | 32768          |
-| NousResearch          | Nous Hermes LLaMA-2 (7B)                 | NousResearch/Nous-Hermes-llama-2-7b            | 4096           |
-| NousResearch          | Nous Hermes Llama-2 (13B)                | NousResearch/Nous-Hermes-Llama2-13b            | 4096           |
-| NousResearch          | Nous Hermes-2 Yi (34B)                   | NousResearch/Nous-Hermes-2-Yi-34B              | 4096           |
-| OpenChat              | OpenChat 3.5 (7B)                        | openchat/openchat-3.5-1210                     | 8192           |
-| OpenOrca              | OpenOrca Mistral (7B) 8K                 | Open-Orca/Mistral-7B-OpenOrca                  | 8192           |
-| Qwen                  | Qwen 1.5 Chat (0.5B)                     | Qwen/Qwen1.5-0.5B-Chat                         | 32768          |
-| Qwen                  | Qwen 1.5 Chat (1.8B)                     | Qwen/Qwen1.5-1.8B-Chat                         | 32768          |
-| Qwen                  | Qwen 1.5 Chat (4B)                       | Qwen/Qwen1.5-4B-Chat                           | 32768          |
-| Qwen                  | Qwen 1.5 Chat (7B)                       | Qwen/Qwen1.5-7B-Chat                           | 32768          |
-| Qwen                  | Qwen 1.5 Chat (14B)                      | Qwen/Qwen1.5-14B-Chat                          | 32768          |
-| Qwen                  | Qwen 1.5 Chat (32B)                      | Qwen/Qwen1.5-32B-Chat                          | 32768          |
-| Qwen                  | Qwen 1.5 Chat (72B)                      | Qwen/Qwen1.5-72B-Chat                          | 32768          |
-| Qwen                  | Qwen 1.5 Chat (110B)                     | Qwen/Qwen1.5-110B-Chat                         | 32768          |
-| Qwen                  | Qwen 2 Instruct (72B)                    | Qwen/Qwen2-72B-Instruct                        | 32768          |
-| Snorkel AI            | Snorkel Mistral PairRM DPO (7B)          | snorkelai/Snorkel-Mistral-PairRM-DPO           | 32768          |
-| Snowflake             | Snowflake Arctic Instruct                | Snowflake/snowflake-arctic-instruct            | 4096           |
-| Stanford              | Alpaca (7B)                              | togethercomputer/alpaca-7b                     | 2048           |
-| Teknium               | OpenHermes-2-Mistral (7B)                | teknium/OpenHermes-2-Mistral-7B                | 8192           |
-| Teknium               | OpenHermes-2.5-Mistral (7B)              | teknium/OpenHermes-2p5-Mistral-7B              | 8192           |
-| Together              | LLaMA-2-7B-32K-Instruct (7B)             | togethercomputer/Llama-2-7B-32K-Instruct       | 32768          |
-| Together              | RedPajama-INCITE Chat (3B)               | togethercomputer/RedPajama-INCITE-Chat-3B-v1   | 2048           |
-| Together              | RedPajama-INCITE Chat (7B)               | togethercomputer/RedPajama-INCITE-7B-Chat      | 2048           |
-| Together              | StripedHyena Nous (7B)                   | togethercomputer/StripedHyena-Nous-7B          | 32768          |
-| Undi95                | ReMM SLERP L2 (13B)                      | Undi95/ReMM-SLERP-L2-13B                       | 4096           |
-| Undi95                | Toppy M (7B)                             | Undi95/Toppy-M-7B                              | 4096           |
-| WizardLM              | WizardLM v1.2 (13B)                      | WizardLM/WizardLM-13B-V1.2                     | 4096           |
-| upstage               | Upstage SOLAR Instruct v1 (11B)          | upstage/SOLAR-10.7B-Instruct-v1.0              | 4096           |
\ No newline at end of file
+| Organization          | Model Name                               | API Model String                                 | Context length | Quantization |
+| :-------------------- | :--------------------------------------- | :----------------------------------------------- | :------------- | :----------- |
+| 01.AI                 | 01-ai Yi Chat (34B)                      | zero-one-ai/Yi-34B-Chat                          | 4096           | FP16         |
+| AllenAI               | OLMo Instruct (7B)                       | allenai/OLMo-7B-Instruct                         | 2048           | FP16         |
+| Austism               | Chronos Hermes (13B)                     | Austism/chronos-hermes-13b                       | 2048           | FP16         |
+| carson                | carson ml318br                           | carson/ml318br                                   | 8192           | FP16         |
+| cognitivecomputations | Dolphin 2.5 Mixtral 8x7b                 | cognitivecomputations/dolphin-2.5-mixtral-8x7b   | 32768          | FP16         |
+| Databricks            | DBRX Instruct                            | databricks/dbrx-instruct                         | 32768          | FP16         |
+| DeepSeek              | DeepSeek LLM Chat (67B)                  | deepseek-ai/deepseek-llm-67b-chat                | 4096           | FP16         |
+| DeepSeek              | Deepseek Coder Instruct (33B)            | deepseek-ai/deepseek-coder-33b-instruct          | 16384          | FP16         |
+| garage-bAInd          | Platypus2 Instruct (70B)                 | garage-bAInd/Platypus2-70B-instruct              | 4096           | FP16         |
+| Google                | Gemma-2 Instruct (9B)                    | google/gemma-2-9b-it                             | 8192           | FP16         |
+| Google                | Gemma Instruct (2B)                      | google/gemma-2b-it                               | 8192           | FP16         |
+| Google                | Gemma-2 Instruct (27B)                   | google/gemma-2-27b-it                            | 8192           | FP16         |
+| Google                | Gemma Instruct (7B)                      | google/gemma-7b-it                               | 8192           | FP16         |
+| gradientai            | Llama-3 70B Instruct Gradient 1048K      | gradientai/Llama-3-70B-Instruct-Gradient-1048k   | 1048576        | FP16         |
+| Gryphe                | MythoMax-L2 (13B)                        | Gryphe/MythoMax-L2-13b                           | 4096           | FP16         |
+| Gryphe                | Gryphe MythoMax L2 Lite (13B)            | Gryphe/MythoMax-L2-13b-Lite                      | 4096           | FP16         |
+| Haotian Liu           | LLaVa-Next (Mistral-7B)                  | llava-hf/llava-v1.6-mistral-7b-hf                | 4096           | FP16         |
+| HuggingFace           | Zephyr-7B-ß                              | HuggingFaceH4/zephyr-7b-beta                     | 32768          | FP16         |
+| LM Sys                | Koala (7B)                               | togethercomputer/Koala-7B                        | 2048           | FP16         |
+| LM Sys                | Vicuna v1.3 (7B)                         | lmsys/vicuna-7b-v1.3                             | 2048           | FP16         |
+| LM Sys                | Vicuna v1.5 16K (13B)                    | lmsys/vicuna-13b-v1.5-16k                        | 16384          | FP16         |
+| LM Sys                | Vicuna v1.5 (13B)                        | lmsys/vicuna-13b-v1.5                            | 4096           | FP16         |
+| LM Sys                | Vicuna v1.3 (13B)                        | lmsys/vicuna-13b-v1.3                            | 2048           | FP16         |
+| LM Sys                | Koala (13B)                              | togethercomputer/Koala-13B                       | 2048           | FP16         |
+| LM Sys                | Vicuna v1.5 (7B)                         | lmsys/vicuna-7b-v1.5                             | 4096           | FP16         |
+| Meta                  | Code Llama Instruct (34B)                | codellama/CodeLlama-34b-Instruct-hf              | 16384          | FP16         |
+| Meta                  | Llama3 8B Chat HF INT4                   | togethercomputer/Llama-3-8b-chat-hf-int4         | 8192           | FP16         |
+| Meta                  | Llama 3.2 90B Vision Instruct Turbo      | meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo   | 131072         | FP16         |
+| Meta                  | Llama 3.2 11B Vision Instruct Turbo      | meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo   | 131072         | FP16         |
+| Meta                  | Meta Llama 3.2 3B Instruct Turbo         | meta-llama/Llama-3.2-3B-Instruct-Turbo           | 131072         | FP16         |
+| Meta                  | Togethercomputer Llama3 8B Instruct Int8 | togethercomputer/Llama-3-8b-chat-hf-int8         | 8192           | FP16         |
+| Meta                  | Meta Llama 3.1 70B Instruct Turbo        | meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo     | 32768          | FP8          |
+| Meta                  | LLaMA-2 Chat (13B)                       | meta-llama/Llama-2-13b-chat-hf                   | 4096           | FP16         |
+| Meta                  | Meta Llama 3 70B Instruct Lite           | meta-llama/Meta-Llama-3-70B-Instruct-Lite        | 8192           | INT4         |
+| Meta                  | Meta Llama 3 8B Instruct Reference       | meta-llama/Llama-3-8b-chat-hf                    | 8192           | FP16         |
+| Meta                  | Meta Llama 3 70B Instruct Reference      | meta-llama/Llama-3-70b-chat-hf                   | 8192           | FP16         |
+| Meta                  | Meta Llama 3 8B Instruct Turbo           | meta-llama/Meta-Llama-3-8B-Instruct-Turbo        | 8192           | FP8          |
+| Meta                  | Meta Llama 3 8B Instruct Lite            | meta-llama/Meta-Llama-3-8B-Instruct-Lite         | 8192           | INT4         |
+| Meta                  | Meta Llama 3.1 405B Instruct Lite Pro    | meta-llama/Meta-Llama-3.1-405B-Instruct-Lite-Pro | 4096           | FP16         |
+| Meta                  | LLaMA-2 Chat (7B)                        | meta-llama/Llama-2-7b-chat-hf                    | 4096           | FP16         |
+| Meta                  | Meta Llama 3.1 405B Instruct Turbo       | meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo    | 130815         | FP8          |
+| Meta                  | (Free) Llama 3.2 11B Vision Instruct Turbo | meta-llama/Llama-Vision-Free                     | 131072         | FP16         |
+| Meta                  | Meta Llama 3 70B Instruct Turbo          | meta-llama/Meta-Llama-3-70B-Instruct-Turbo       | 8192           | FP8          |
+| Meta                  | Meta Llama 3.1 8B Instruct Turbo         | meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo      | 32768          | FP8          |
+| Meta                  | Code Llama Instruct (7B)                 | togethercomputer/CodeLlama-7b-Instruct           | 16384          | FP16         |
+| Meta                  | Code Llama Instruct (34B)                | togethercomputer/CodeLlama-34b-Instruct          | 16384          | FP16         |
+| Meta                  | Code Llama Instruct (13B)                | codellama/CodeLlama-13b-Instruct-hf              | 16384          | FP16         |
+| Meta                  | Code Llama Instruct (13B)                | togethercomputer/CodeLlama-13b-Instruct          | 16384          | FP16         |
+| Meta                  | LLaMA-2 Chat (13B)                       | togethercomputer/llama-2-13b-chat                | 4096           | FP16         |
+| Meta                  | LLaMA-2 Chat (7B)                        | togethercomputer/llama-2-7b-chat                 | 4096           | FP16         |
+| Meta                  | Meta Llama 3 8B Instruct                 | meta-llama/Meta-Llama-3-8B-Instruct              | 8192           | FP16         |
+| Meta                  | Meta Llama 3 70B Instruct                | meta-llama/Meta-Llama-3-70B-Instruct             | 8192           | FP16         |
+| Meta                  | Code Llama Instruct (70B)                | codellama/CodeLlama-70b-Instruct-hf              | 4096           | FP16         |
+| Meta                  | LLaMA-2 Chat (70B)                       | togethercomputer/llama-2-70b-chat                | 4096           | FP16         |
+| Meta                  | Code Llama Instruct (7B)                 | codellama/CodeLlama-7b-Instruct-hf               | 16384          | FP16         |
+| Meta                  | LLaMA-2 Chat (70B)                       | meta-llama/Llama-2-70b-chat-hf                   | 4096           | FP16         |
+| Meta                  | Meta Llama 3.1 8B Instruct               | meta-llama/Meta-Llama-3.1-8B-Instruct-Reference  | 16384          | FP16         |
+| Meta                  | Meta Llama 3.1 70B Instruct Turbo        | albert/meta-llama-3-1-70b-instruct-turbo         | 131072         | FP16         |
+| Meta                  | Meta Llama 3.1 70B Instruct              | meta-llama/Meta-Llama-3.1-70B-Instruct-Reference | 8192           | FP16         |
+| microsoft             | WizardLM-2 (8x22B)                       | microsoft/WizardLM-2-8x22B                       | 65536          | FP16         |
+| mistralai             | Mistral (7B) Instruct                    | mistralai/Mistral-7B-Instruct-v0.1               | 4096           | FP16         |
+| mistralai             | Mistral (7B) Instruct v0.2               | mistralai/Mistral-7B-Instruct-v0.2               | 32768          | FP16         |
+| mistralai             | Mistral (7B) Instruct v0.3               | mistralai/Mistral-7B-Instruct-v0.3               | 32768          | FP16         |
+| mistralai             | Mixtral-8x7B Instruct v0.1               | mistralai/Mixtral-8x7B-Instruct-v0.1             | 32768          | FP16         |
+| mistralai             | Mixtral-8x22B Instruct v0.1              | mistralai/Mixtral-8x22B-Instruct-v0.1            | 65536          | FP16         |
+| NousResearch          | Nous Hermes 2 - Mixtral 8x7B-DPO         | NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO      | 32768          | FP16         |
+| NousResearch          | Nous Hermes LLaMA-2 (70B)                | NousResearch/Nous-Hermes-Llama2-70b              | 4096           | FP16         |
+| NousResearch          | Nous Hermes 2 - Mixtral 8x7B-SFT         | NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT      | 32768          | FP16         |
+| NousResearch          | Nous Hermes Llama-2 (13B)                | NousResearch/Nous-Hermes-Llama2-13b              | 4096           | FP16         |
+| NousResearch          | Nous Hermes 2 - Mistral DPO (7B)         | NousResearch/Nous-Hermes-2-Mistral-7B-DPO        | 32768          | FP16         |
+| NousResearch          | Nous Hermes LLaMA-2 (7B)                 | NousResearch/Nous-Hermes-llama-2-7b              | 4096           | FP16         |
+| NousResearch          | Nous Capybara v1.9 (7B)                  | NousResearch/Nous-Capybara-7B-V1p9               | 8192           | FP16         |
+| NousResearch          | Hermes 2 Theta Llama-3 70B               | NousResearch/Hermes-2-Theta-Llama-3-70B          | 8192           | FP16         |
+| OpenChat              | OpenChat 3.5                             | openchat/openchat-3.5-1210                       | 8192           | FP16         |
+| OpenOrca              | OpenOrca Mistral (7B) 8K                 | Open-Orca/Mistral-7B-OpenOrca                    | 8192           | FP16         |
+| Qwen                  | Qwen 2 Instruct (72B)                    | Qwen/Qwen2-72B-Instruct                          | 32768          | FP16         |
+| Qwen                  | Qwen2.5 72B Instruct Turbo               | Qwen/Qwen2.5-72B-Instruct-Turbo                  | 32768          | FP8          |
+| Qwen                  | Qwen2.5 7B Instruct Turbo                | Qwen/Qwen2.5-7B-Instruct-Turbo                   | 32768          | FP8          |
+| Qwen                  | Qwen 1.5 Chat (110B)                     | Qwen/Qwen1.5-110B-Chat                           | 32768          | FP16         |
+| Qwen                  | Qwen 1.5 Chat (72B)                      | Qwen/Qwen1.5-72B-Chat                            | 32768          | FP16         |
+| Qwen                  | Qwen 2 Instruct (1.5B)                   | Qwen/Qwen2-1.5B-Instruct                         | 32768          | FP16         |
+| Qwen                  | Qwen 2 Instruct (7B)                     | Qwen/Qwen2-7B-Instruct                           | 32768          | FP16         |
+| Qwen                  | Qwen 1.5 Chat (14B)                      | Qwen/Qwen1.5-14B-Chat                            | 32768          | FP16         |
+| Qwen                  | Qwen 1.5 Chat (1.8B)                     | Qwen/Qwen1.5-1.8B-Chat                           | 32768          | FP16         |
+| Qwen                  | Qwen 1.5 Chat (32B)                      | Qwen/Qwen1.5-32B-Chat                            | 32768          | FP16         |
+| Qwen                  | Qwen 1.5 Chat (7B)                       | Qwen/Qwen1.5-7B-Chat                             | 32768          | FP16         |
+| Qwen                  | Qwen 1.5 Chat (0.5B)                     | Qwen/Qwen1.5-0.5B-Chat                           | 32768          | FP16         |
+| Qwen                  | Qwen 1.5 Chat (4B)                       | Qwen/Qwen1.5-4B-Chat                             | 32768          | FP16         |
+| Snorkel AI            | Snorkel Mistral PairRM DPO (7B)          | snorkelai/Snorkel-Mistral-PairRM-DPO             | 32768          | FP16         |
+| Snowflake             | Snowflake Arctic Instruct                | Snowflake/snowflake-arctic-instruct              | 4096           | FP16         |
+| Stanford              | Alpaca (7B)                              | togethercomputer/alpaca-7b                       | 2048           | FP16         |
+| teknium               | OpenHermes-2-Mistral (7B)                | teknium/OpenHermes-2-Mistral-7B                  | 8192           | FP16         |
+| teknium               | OpenHermes-2.5-Mistral (7B)              | teknium/OpenHermes-2p5-Mistral-7B                | 8192           | FP16         |
+| test                  | Test 11                                  | test/test11                                      | 4096           | FP16         |
+| Tim Dettmers          | Guanaco (65B)                            | togethercomputer/guanaco-65b                     | 2048           | FP16         |
+| Tim Dettmers          | Guanaco (13B)                            | togethercomputer/guanaco-13b                     | 2048           | FP16         |
+| Tim Dettmers          | Guanaco (33B)                            | togethercomputer/guanaco-33b                     | 2048           | FP16         |
+| Tim Dettmers          | Guanaco (7B)                             | togethercomputer/guanaco-7b                      | 2048           | FP16         |
+| Undi95                | ReMM SLERP L2 (13B)                      | Undi95/ReMM-SLERP-L2-13B                         | 4096           | FP16         |
+| Undi95                | Toppy M (7B)                             | Undi95/Toppy-M-7B                                | 4096           | FP16         |
+| upstage               | Upstage SOLAR Instruct v1 (11B)          | upstage/SOLAR-10.7B-Instruct-v1.0                | 4096           | FP16         |
+| upstage               | Upstage SOLAR Instruct v1 (11B)-Int4     | togethercomputer/SOLAR-10.7B-Instruct-v1.0-int4  | 4096           | FP16         |
+| WizardLM              | WizardLM v1.2 (13B)                      | WizardLM/WizardLM-13B-V1.2                       | 4096           | FP16         |
\ No newline at end of file
diff --git a/server/utils/AiProviders/togetherAi/scripts/parse.mjs b/server/utils/AiProviders/togetherAi/scripts/parse.mjs
index e803e729577e1b4ba11383319a1ce9673c7e8060..3769d8c632bc774553d428eed2e213cb91ba3d82 100644
--- a/server/utils/AiProviders/togetherAi/scripts/parse.mjs
+++ b/server/utils/AiProviders/togetherAi/scripts/parse.mjs
@@ -8,7 +8,7 @@
 // copy outputs into the export in ../models.js
 
 // Update the date below if you run this again because TogetherAI added new models.
-// Last Collected: Nov 18, 2024
+// Last Collected: Nov 20, 2024
 // Since last collection Together's docs are broken. I just copied the HTML table
 // and had claude3 convert to markdown and it works well enough.