Skip to content
Snippets Groups Projects
Unverified Commit e0f6cc3b authored by Gunnar Holwerda's avatar Gunnar Holwerda Committed by GitHub
Browse files

fix: return actual source nodes with compact and refine response synt… (#1554)

parent 8386510d
No related branches found
No related tags found
No related merge requests found
---
"@llamaindex/core": patch
---
The compact and refine response synthesizer (retrieved by using `getResponseSynthesizer('compact')`) has been fixed to return the original source nodes that were provided to it in its response. Prior to this fix, it returned the compacted text chunk documents instead.
......@@ -77,6 +77,16 @@ class Refine extends BaseSynthesizer {
}
}
async getResponse(
query: MessageContent,
nodes: NodeWithScore[],
stream: true,
): Promise<AsyncIterable<EngineResponse>>;
async getResponse(
query: MessageContent,
nodes: NodeWithScore[],
stream: false,
): Promise<EngineResponse>;
async getResponse(
query: MessageContent,
nodes: NodeWithScore[],
......@@ -197,6 +207,16 @@ class Refine extends BaseSynthesizer {
* CompactAndRefine is a slight variation of Refine that first compacts the text chunks into the smallest possible number of chunks.
*/
class CompactAndRefine extends Refine {
async getResponse(
query: MessageContent,
nodes: NodeWithScore[],
stream: true,
): Promise<AsyncIterable<EngineResponse>>;
async getResponse(
query: MessageContent,
nodes: NodeWithScore[],
stream: false,
): Promise<EngineResponse>;
async getResponse(
query: MessageContent,
nodes: NodeWithScore[],
......@@ -216,17 +236,24 @@ class CompactAndRefine extends Refine {
const newTexts = this.promptHelper.repack(maxPrompt, textChunks);
const newNodes = newTexts.map((text) => new TextNode({ text }));
if (stream) {
return super.getResponse(
const streamResponse = await super.getResponse(
query,
newNodes.map((node) => ({ node })),
true,
);
return streamConverter(streamResponse, (chunk) => {
chunk.sourceNodes = nodes;
return chunk;
});
}
return super.getResponse(
const originalResponse = await super.getResponse(
query,
newNodes.map((node) => ({ node })),
false,
);
originalResponse.sourceNodes = nodes;
return originalResponse;
}
}
......
import { describe, expect, test, vi } from "vitest";
import type { LLMMetadata } from "../../llms/dist/index.js";
import { getResponseSynthesizer } from "../../response-synthesizers/dist/index.js";
import { Document } from "../../schema/dist/index.js";
// Minimal LLM stub for the synthesizer tests: `complete` resolves with a fixed
// text payload, or — when `stream` is requested — a generator emitting that
// payload several times so each streamed chunk can be checked for sourceNodes.
const mockLllm = () => ({
  complete: vi.fn().mockImplementation(({ stream }) => {
    const response = { text: "unimportant" };
    if (stream) {
      // yield a few times to make sure each chunk has the sourceNodes
      return (function* () {
        yield response;
        yield response;
        yield response;
      })();
    }
    return response;
  }),
  chat: vi.fn(),
  metadata: {} as unknown as LLMMetadata,
});
describe("compact and refine response synthesizer", () => {
  describe("synthesize", () => {
    test("should return original sourceNodes with response when stream = false", async () => {
      // The synthesizer compacts chunks internally; the response must still
      // carry the nodes the caller passed in, not the compacted ones.
      const sourceNode = { node: new Document({}), score: 1 };
      const synthesizer = getResponseSynthesizer("compact", {
        llm: mockLllm(),
      });

      const result = await synthesizer.synthesize(
        { query: "test", nodes: [sourceNode] },
        false,
      );

      expect(result.sourceNodes).toEqual([sourceNode]);
    });

    test("should return original sourceNodes with response when stream = true", async () => {
      // Every chunk of the stream — not just the first — should expose the
      // caller-provided source nodes.
      const sourceNode = { node: new Document({}), score: 1 };
      const synthesizer = getResponseSynthesizer("compact", {
        llm: mockLllm(),
      });

      const stream = await synthesizer.synthesize(
        { query: "test", nodes: [sourceNode] },
        true,
      );

      for await (const chunk of stream) {
        expect(chunk.sourceNodes).toEqual([sourceNode]);
      }
    });
  });
});
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment