diff --git a/server/utils/EmbeddingEngines/azureOpenAi/index.js b/server/utils/EmbeddingEngines/azureOpenAi/index.js
index 8959b00070dfdf451d7f1b83e43ff95db9ba181e..3f36b576b98e75d53629d26da9e6e58f2b0a1161 100644
--- a/server/utils/EmbeddingEngines/azureOpenAi/index.js
+++ b/server/utils/EmbeddingEngines/azureOpenAi/index.js
@@ -14,7 +14,7 @@ class AzureOpenAiEmbedder {
     );
     this.openai = openai;
 
-    // The maximum amount of "inputs" that OpenAI API can process in a single call.
+    // Limit on how many strings we can process in a single pass to stay within resource or network limits.
     // https://learn.microsoft.com/en-us/azure/ai-services/openai/faq#i-am-trying-to-use-embeddings-and-received-the-error--invalidrequesterror--too-many-inputs--the-max-number-of-inputs-is-1---how-do-i-fix-this-:~:text=consisting%20of%20up%20to%2016%20inputs%20per%20API%20request
     this.embeddingMaxChunkLength = 16;
   }
diff --git a/server/utils/EmbeddingEngines/localAi/index.js b/server/utils/EmbeddingEngines/localAi/index.js
index 0f43cc7dc6579514caea931e3468a4193a802ec3..68fe66544932f5b3fb4c0de6f45f3dbe026425fb 100644
--- a/server/utils/EmbeddingEngines/localAi/index.js
+++ b/server/utils/EmbeddingEngines/localAi/index.js
@@ -17,7 +17,7 @@ class LocalAiEmbedder {
     });
     this.openai = new OpenAIApi(config);
 
-    // Arbitrary limit of string size in chars to ensure we stay within reasonable POST request size.
+    // Limit on how many strings we can process in a single pass to stay within resource or network limits.
     this.embeddingMaxChunkLength = maximumChunkLength();
   }
 
diff --git a/server/utils/EmbeddingEngines/native/index.js b/server/utils/EmbeddingEngines/native/index.js
index 2081e3fdee4fe66b51ca4db0e054038f59db4a89..81189d4e2e7438cf34166174b92b19f28a630862 100644
--- a/server/utils/EmbeddingEngines/native/index.js
+++ b/server/utils/EmbeddingEngines/native/index.js
@@ -13,8 +13,8 @@ class NativeEmbedder {
     );
     this.modelPath = path.resolve(this.cacheDir, "Xenova", "all-MiniLM-L6-v2");
 
-    // Arbitrary limit of string size in chars to ensure we stay within reasonable POST request size.
-    this.embeddingMaxChunkLength = 1_000;
+    // Limit on how many strings we can process in a single pass to stay within resource or network limits.
+    this.embeddingMaxChunkLength = 50;
 
     // Make directory when it does not exist in existing installations
     if (!fs.existsSync(this.cacheDir)) fs.mkdirSync(this.cacheDir);
diff --git a/server/utils/EmbeddingEngines/openAi/index.js b/server/utils/EmbeddingEngines/openAi/index.js
index 8cfa235169c01080c23f53a28f46daf81c6b22cd..6ba38c898d911fdcb971e0a12fa995d9d172958b 100644
--- a/server/utils/EmbeddingEngines/openAi/index.js
+++ b/server/utils/EmbeddingEngines/openAi/index.js
@@ -10,7 +10,7 @@ class OpenAiEmbedder {
     const openai = new OpenAIApi(config);
     this.openai = openai;
 
-    // Arbitrary limit of string size in chars to ensure we stay within reasonable POST request size.
+    // Limit on how many strings we can process in a single pass to stay within resource or network limits.
     this.embeddingMaxChunkLength = 1_000;
   }