diff --git a/src/renderer/components/Experiment/Eval/EditEvalModal.tsx b/src/renderer/components/Experiment/Eval/EditEvalModal.tsx
index c6973da7fdb0ac301c1b9c95c9b2fce86931e081..e0208b91b053064b06b5e0564d2b4a83d66be56a 100644
--- a/src/renderer/components/Experiment/Eval/EditEvalModal.tsx
+++ b/src/renderer/components/Experiment/Eval/EditEvalModal.tsx
@@ -53,12 +53,14 @@ export default function TrainingModalLoRA({
   onClose,
   experimentInfo,
   pluginId,
+  currentEvalName,
 }: {
   open: boolean;
   onClose: () => void;
   experimentInfo: any;
   template_id?: string;
   pluginId: string;
+  currentEvalName: string;
 }) {
   // Store the current selected Dataset in this modal
   const [selectedDataset, setSelectedDataset] = useState(null);
@@ -81,6 +83,28 @@ export default function TrainingModalLoRA({
       return chatAPI.Endpoints.Dataset.Info(selectedDataset);
     }, fetcher);
 
+  // Pre-populate the form config with this eval's saved script parameters, looked up in the experiment info by currentEvalName and pluginId
+  useEffect(() => {
+    if (experimentInfo && currentEvalName && pluginId) {
+      const evaluationsStr = experimentInfo.config?.evaluations;
+      if (typeof evaluationsStr === 'string') {
+        try {
+          const evaluations = JSON.parse(evaluationsStr);
+          if (Array.isArray(evaluations)) {
+            const evalConfig = evaluations.find(
+              (evalItem: any) => evalItem.name === currentEvalName && evalItem.plugin === pluginId
+            );
+            if (evalConfig) {
+              setConfig(evalConfig.script_parameters);
+            }
+          }
+        } catch (error) {
+          console.error('Failed to parse evaluations JSON string:', error);
+        }
+      }
+    }
+  }, [experimentInfo, currentEvalName, pluginId]);
+
   if (!experimentInfo?.id) {
     return 'Select an Experiment';
   }
diff --git a/src/renderer/components/Experiment/Eval/EvalTasksTable.tsx b/src/renderer/components/Experiment/Eval/EvalTasksTable.tsx
index c2c2c37a4260bab3388cb6aebe5d9d720ec4ae12..9a25d5872706044767c9112135a0cbcb86a5210b 100644
--- a/src/renderer/components/Experiment/Eval/EvalTasksTable.tsx
+++ b/src/renderer/components/Experiment/Eval/EvalTasksTable.tsx
@@ -40,6 +40,7 @@ export default function EvalTasksTable({
 }) {
   const [open, setOpen] = useState(false);
   const [currentPlugin, setCurrentPlugin] = useState('');
+  const [currentEvalName, setCurrentEvalName] = useState('');
 
   return (
     <>
@@ -50,6 +51,7 @@ export default function EvalTasksTable({
         }}
         experimentInfo={experimentInfo}
         pluginId={currentPlugin}
+        currentEvalName={currentEvalName}
       />
       <Table aria-label="basic table" stickyHeader>
         <thead>
@@ -92,6 +94,7 @@ export default function EvalTasksTable({
                         onClick={() => {
                           setOpen(true);
                           setCurrentPlugin(evaluations?.plugin);
+                          setCurrentEvalName(evaluations?.name);
                         }}
                       >
                         Edit