From fcdaf03adffdd98c94cf251a70c03f70a239f292 Mon Sep 17 00:00:00 2001
From: deep1401 <gandhi0869@gmail.com>
Date: Thu, 20 Feb 2025 10:14:51 -0800
Subject: [PATCH] Change routes for batched prompts and fix state bug in
 GenerateModal

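Rename the batched prompt endpoints in the API SDK to the new "batch"
namespace:

  batched_prompts/list                 -> batch/list
  batched_prompts/new                  -> batch/new
  batched_prompts/delete/<promptId>    -> batch/delete/<promptId>
  batched_prompts/batch_predict        -> batch/chat/completions

In GenerateModal, set the documents/context flags inside the branch that
matches the loaded config instead of directly from key detection, so only
one of the two input modes is enabled when an existing generation config
is loaded.
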
---
 .../components/Experiment/Generate/GenerateModal.tsx   | 10 ++++++++--
 src/renderer/lib/transformerlab-api-sdk.ts             |  8 ++++----
 2 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/src/renderer/components/Experiment/Generate/GenerateModal.tsx b/src/renderer/components/Experiment/Generate/GenerateModal.tsx
index 5d410b0d..92546792 100644
--- a/src/renderer/components/Experiment/Generate/GenerateModal.tsx
+++ b/src/renderer/components/Experiment/Generate/GenerateModal.tsx
@@ -150,11 +150,13 @@ export default function GenerateModal({
                 ).some((key) => key.toLowerCase().includes('context'));
 
                 setHasDatasetKey(datasetKeyExists);
-                setHasDocumentsKey(docsKeyExists);
-                setHasContextKey(contextKeyExists);
+                // setHasDocumentsKey(docsKeyExists);
+                // setHasContextKey(contextKeyExists);
 
                 if (docsKeyExists && evalConfig.script_parameters.docs.length > 0) {
                   // const docstemp = evalConfig.script_parameters.docs.split(',').map((path) => ({ path }));
+                  setHasContextKey(false);
+                  setHasDocumentsKey(true);
                   const docPaths = evalConfig.script_parameters.docs.split(',');
                   const docNames = evalConfig.script_parameters.doc_names.split(',');
                   // const docFiles = docPaths.map((path) => new File([], path));
@@ -165,6 +167,8 @@ export default function GenerateModal({
 
                 }
                 else if (contextKeyExists && evalConfig.script_parameters.context.length > 0) {
+                  setHasContextKey(true);
+                  setHasDocumentsKey(false);
                   const context = evalConfig.script_parameters.context;
                   setContextInput(context);
                   delete evalConfig.script_parameters.context;
@@ -392,6 +396,8 @@ export default function GenerateModal({
         formJson.generation_type = 'scratch';
       }
 
+      console.log('formJson', formJson);
+
 
       // Run when the currentEvalName is provided
       if (currentEvalName && currentEvalName !== '') {
diff --git a/src/renderer/lib/transformerlab-api-sdk.ts b/src/renderer/lib/transformerlab-api-sdk.ts
index 1b9fa08e..9d249a9d 100644
--- a/src/renderer/lib/transformerlab-api-sdk.ts
+++ b/src/renderer/lib/transformerlab-api-sdk.ts
@@ -727,7 +727,7 @@ export async function sendBatchedChat(
   // }
   // console.log("RESULTS", results);
   let response;
-  const batchedChatUrl = `${API_URL()}batched_prompts/batch_predict`;
+  const batchedChatUrl = `${API_URL()}batch/chat/completions`;
   try {
     response = await fetch(batchedChatUrl, {
       method: 'POST', // or 'PUT'
@@ -1158,10 +1158,10 @@ Endpoints.Prompts = {
 };
 
 Endpoints.BatchedPrompts = {
-  List: () => API_URL() + 'batched_prompts/list',
-  New: () => API_URL() + 'batched_prompts/new',
+  List: () => API_URL() + 'batch/list',
+  New: () => API_URL() + 'batch/new',
   Delete: (promptId: string) =>
-    API_URL() + 'batched_prompts/delete/' + promptId,
+    API_URL() + 'batch/delete/' + promptId,
 };
 
 Endpoints.Tools = {
-- 
GitLab