From a048cf451a4acff86812d3e2fb5553385fd9ceaa Mon Sep 17 00:00:00 2001
From: timothycarambat <rambat1010@gmail.com>
Date: Thu, 10 Aug 2023 13:50:17 -0700
Subject: [PATCH] Hotfix: correct disk-storage path for Unix

---
 server/endpoints/utils.js                     |  2 +-
 .../utils/vectorDbProviders/weaviate/index.js | 21 ++++++++++---------
 2 files changed, 12 insertions(+), 11 deletions(-)

diff --git a/server/endpoints/utils.js b/server/endpoints/utils.js
index 224217806..0e6eb986c 100644
--- a/server/endpoints/utils.js
+++ b/server/endpoints/utils.js
@@ -14,7 +14,7 @@ function byteToGigaByte(n) {
 async function getDiskStorage() {
   try {
     const checkDiskSpace = require("check-disk-space").default;
-    const { free, size } = await checkDiskSpace("/dev/xvda");
+    const { free, size } = await checkDiskSpace("/");
     return {
       current: Math.floor(byteToGigaByte(free)),
       capacity: Math.floor(byteToGigaByte(size)),
diff --git a/server/utils/vectorDbProviders/weaviate/index.js b/server/utils/vectorDbProviders/weaviate/index.js
index 884c08e01..99e0859fe 100644
--- a/server/utils/vectorDbProviders/weaviate/index.js
+++ b/server/utils/vectorDbProviders/weaviate/index.js
@@ -357,10 +357,10 @@ const Weaviate = {
       content: `${chatPrompt(workspace)}
     Context:
     ${contextTexts
-          .map((text, i) => {
-            return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
-          })
-          .join("")}`,
+      .map((text, i) => {
+        return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
+      })
+      .join("")}`,
     };
     const memory = [prompt, { role: "user", content: input }];
     const responseText = await LLMConnector.getChatCompletion(memory, {
@@ -407,10 +407,10 @@ const Weaviate = {
       content: `${chatPrompt(workspace)}
     Context:
     ${contextTexts
-          .map((text, i) => {
-            return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
-          })
-          .join("")}`,
+      .map((text, i) => {
+        return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
+      })
+      .join("")}`,
     };
     const memory = [prompt, ...chatHistory, { role: "user", content: input }];
     const responseText = await LLMConnector.getChatCompletion(memory, {
@@ -438,8 +438,9 @@ const Weaviate = {
     const details = await this.namespace(client, namespace);
     await this.deleteVectorsInNamespace(client, namespace);
     return {
-      message: `Namespace ${camelCase(namespace)} was deleted along with ${details?.vectorCount
-        } vectors.`,
+      message: `Namespace ${camelCase(namespace)} was deleted along with ${
+        details?.vectorCount
+      } vectors.`,
     };
   },
   reset: async function () {
-- 
GitLab