diff --git a/.changeset/fix_refine_synthesizer_empty_source_nodes_behavior.md b/.changeset/fix_refine_synthesizer_empty_source_nodes_behavior.md
new file mode 100644
index 0000000000000000000000000000000000000000..a8fb9136145e5c9823755cc6d335f18b3041e15b
--- /dev/null
+++ b/.changeset/fix_refine_synthesizer_empty_source_nodes_behavior.md
@@ -0,0 +1,18 @@
+---
+"@llamaindex/core": patch
+---
+
+The refine synthesizer now returns an empty string as the response when an empty array of source nodes is provided. Previously it threw an internal error while converting `undefined` to a `ReadableStream`.
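+
+A minimal usage sketch of the new behavior (this assumes the `@llamaindex/core/response-synthesizers` subpath export and some already-configured `llm` instance):
+
+```ts
+import { getResponseSynthesizer } from "@llamaindex/core/response-synthesizers";
+
+// `llm` is whatever LLM instance your app already uses
+const synthesizer = getResponseSynthesizer("refine", { llm });
+
+// With no source nodes, this now resolves to an empty response instead of throwing
+const response = await synthesizer.getResponse("any query", [], false);
+console.log(response.message.content); // ""
+```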
diff --git a/packages/core/src/response-synthesizers/factory.ts b/packages/core/src/response-synthesizers/factory.ts
index b513c2348017cb46884d98450a67a0f4da7050de..0e6ae17537ed3c794c6ac368e59a2159b0f8fabd 100644
--- a/packages/core/src/response-synthesizers/factory.ts
+++ b/packages/core/src/response-synthesizers/factory.ts
@@ -116,7 +116,15 @@ class Refine extends BaseSynthesizer {
       }
     }
 
-    // fixme: no source nodes provided, cannot fix right now due to lack of context
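+    // No response was produced because no source nodes were provided; fall back to an
+    // empty string (or an empty stream) so callers still receive a valid EngineResponse.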
+    if (response === undefined) {
+      response = stream
+        ? (async function* () {
+            yield "";
+          })()
+        : "";
+    }
+
     if (typeof response === "string") {
       return EngineResponse.fromResponse(response, false, nodes);
     } else {
diff --git a/packages/core/tests/memory/chat-memory-buffer.test.ts b/packages/core/tests/memory/chat-memory-buffer.test.ts
index 92f13c59bbb7f4abc46a81df4442b2b523dcf0f8..51a7a0c376e5c7c5384dfb84cc866bae38318e60 100644
--- a/packages/core/tests/memory/chat-memory-buffer.test.ts
+++ b/packages/core/tests/memory/chat-memory-buffer.test.ts
@@ -66,9 +66,9 @@ describe("ChatMemoryBuffer", () => {
     expect(result).toEqual([...inputMessages, ...storedMessages]);
   });
 
-  test("getMessages throws error when initial token count exceeds limit", () => {
+  test("getMessages throws error when initial token count exceeds limit", async () => {
     const buffer = new ChatMemoryBuffer({ tokenLimit: 10 });
-    expect(async () => buffer.getMessages(undefined, 20)).rejects.toThrow(
+    await expect(async () => buffer.getMessages(undefined, 20)).rejects.toThrow(
       "Initial token count exceeds token limit",
     );
   });
diff --git a/packages/core/tests/response-synthesizers/compact-and-refine.test.ts b/packages/core/tests/response-synthesizers/compact-and-refine.test.ts
index fa3cad252e6ad9da1889244b6427a35d1c269828..8bc20e9f8b658dd87b99b5caf78adec7513ee041 100644
--- a/packages/core/tests/response-synthesizers/compact-and-refine.test.ts
+++ b/packages/core/tests/response-synthesizers/compact-and-refine.test.ts
@@ -1,4 +1,4 @@
-import { describe, expect, test, vi } from "vitest";
+import { beforeEach, describe, expect, test, vi } from "vitest";
 import type { LLMMetadata } from "../../llms/dist/index.js";
 import { getResponseSynthesizer } from "../../response-synthesizers/dist/index.js";
 import { Document } from "../../schema/dist/index.js";
@@ -10,26 +10,69 @@ const mockLllm = () => ({
       return response;
     }
 
-    function* gen() {
-      // yield a few times to make sure each chunk has the sourceNodes
-      yield response;
-      yield response;
-      yield response;
-    }
-
-    return gen();
+    return {
+      [Symbol.asyncIterator]: async function* gen() {
+        // yield a few times to make sure each chunk has the sourceNodes
+        yield response;
+        yield response;
+        yield response;
+      },
+    };
   }),
   chat: vi.fn(),
   metadata: {} as unknown as LLMMetadata,
 });
 
+describe("refine response synthesizer", () => {
+  let synthesizer: ReturnType<typeof getResponseSynthesizer<"refine">>;
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  const isAsyncIterable = (obj: any): boolean =>
+    obj[Symbol.asyncIterator] !== undefined;
+
+  beforeEach(() => {
+    synthesizer = getResponseSynthesizer("refine", {
+      llm: mockLllm(),
+    });
+  });
+
+  describe("getResponse", () => {
+    test("should return async iterable of EngineResponse when stream is true and sourceNodes are empty", async () => {
+      const response = await synthesizer.getResponse(
+        "unimportant query",
+        [],
+        true,
+      );
+
+      expect(isAsyncIterable(response)).toBe(true);
+      for await (const chunk of response) {
+        expect(chunk.message.content).toEqual("");
+      }
+    });
+
+    test("should return non async iterable when stream is false and sourceNodes are empty", async () => {
+      const response = await synthesizer.getResponse(
+        "unimportant query",
+        [],
+        false,
+      );
+
+      expect(isAsyncIterable(response)).toBe(false);
+      expect(response.message.content).toEqual("");
+    });
+  });
+});
+
 describe("compact and refine response synthesizer", () => {
+  let synthesizer: ReturnType<typeof getResponseSynthesizer<"compact">>;
+
+  beforeEach(() => {
+    synthesizer = getResponseSynthesizer("compact", {
+      llm: mockLllm(),
+    });
+  });
+
   describe("synthesize", () => {
     test("should return original sourceNodes with response when stream = false", async () => {
-      const synthesizer = getResponseSynthesizer("compact", {
-        llm: mockLllm(),
-      });
-
       const sourceNode = { node: new Document({}), score: 1 };
 
       const response = await synthesizer.synthesize(
@@ -44,10 +87,6 @@ describe("compact and refine response synthesizer", () => {
     });
 
     test("should return original sourceNodes with response when stream = true", async () => {
-      const synthesizer = getResponseSynthesizer("compact", {
-        llm: mockLllm(),
-      });
-
       const sourceNode = { node: new Document({}), score: 1 };
 
       const response = await synthesizer.synthesize(