Skip to content
Snippets Groups Projects
Commit 1008b775 authored by Marcus Schiesser's avatar Marcus Schiesser
Browse files

test: cleaned up tests and added test to ignore duplicates

parent 41210dfc
No related branches found
No related tags found
No related merge requests found
import { storageContextFromDefaults } from "llamaindex/storage/StorageContext"; import {
storageContextFromDefaults,
type StorageContext,
} from "llamaindex/storage/StorageContext";
import { existsSync, rmSync } from "node:fs"; import { existsSync, rmSync } from "node:fs";
import { describe, expect, test, vi, vitest } from "vitest"; import {
afterAll,
beforeAll,
describe,
expect,
test,
vi,
vitest,
} from "vitest";
vitest.spyOn(console, "error"); vitest.spyOn(console, "error");
describe("StorageContext", () => { describe("StorageContext", () => {
test("initializes", async () => { let storageContext: StorageContext;
vi.mocked(console.error).mockImplementation(() => {}); // silence console.error
const storageContext = await storageContextFromDefaults({ beforeAll(async () => {
storageContext = await storageContextFromDefaults({
persistDir: "/tmp/test_dir", persistDir: "/tmp/test_dir",
}); });
});
test("initializes", async () => {
vi.mocked(console.error).mockImplementation(() => {}); // silence console.error
expect(existsSync("/tmp/test_dir")).toBe(true); expect(existsSync("/tmp/test_dir")).toBe(true);
expect(storageContext).toBeDefined(); expect(storageContext).toBeDefined();
});
// cleanup afterAll(() => {
rmSync("/tmp/test_dir", { recursive: true }); rmSync("/tmp/test_dir", { recursive: true });
}); });
}); });
import type { ServiceContext } from "llamaindex";
import { import {
Document, Document,
OpenAI,
OpenAIEmbedding,
SummaryIndex, SummaryIndex,
VectorStoreIndex, VectorStoreIndex,
serviceContextFromDefaults,
storageContextFromDefaults, storageContextFromDefaults,
type ServiceContext,
type StorageContext,
} from "llamaindex"; } from "llamaindex";
import { beforeAll, describe, expect, it, vi } from "vitest"; import { rmSync } from "node:fs";
import { import { afterAll, beforeAll, describe, expect, it } from "vitest";
mockEmbeddingModel, import { mockServiceContext } from "../utility/mockServiceContext.js";
mockLlmGeneration,
} from "../utility/mockOpenAI.js";
// Mock the OpenAI getOpenAISession function during testing
vi.mock("llamaindex/llm/open_ai", () => {
return {
getOpenAISession: vi.fn().mockImplementation(() => null),
};
});
describe("SummaryIndex", () => { describe("SummaryIndex", () => {
let serviceContext: ServiceContext; let serviceContext: ServiceContext;
let storageContext: StorageContext;
beforeAll(() => { beforeAll(async () => {
const embeddingModel = new OpenAIEmbedding(); serviceContext = mockServiceContext();
const llm = new OpenAI(); storageContext = await storageContextFromDefaults({
persistDir: "/tmp/test_dir",
mockEmbeddingModel(embeddingModel);
mockLlmGeneration({ languageModel: llm });
const ctx = serviceContextFromDefaults({
embedModel: embeddingModel,
llm,
}); });
serviceContext = ctx;
}); });
it("SummaryIndex and VectorStoreIndex must be able to share the same storage context", async () => { it("SummaryIndex and VectorStoreIndex must be able to share the same storage context", async () => {
const storageContext = await storageContextFromDefaults({
persistDir: "/tmp/test_dir",
});
const documents = [new Document({ text: "lorem ipsem", id_: "1" })]; const documents = [new Document({ text: "lorem ipsem", id_: "1" })];
const vectorIndex = await VectorStoreIndex.fromDocuments(documents, { const vectorIndex = await VectorStoreIndex.fromDocuments(documents, {
serviceContext, serviceContext,
...@@ -55,4 +34,8 @@ describe("SummaryIndex", () => { ...@@ -55,4 +34,8 @@ describe("SummaryIndex", () => {
}); });
expect(summaryIndex).toBeDefined(); expect(summaryIndex).toBeDefined();
}); });
afterAll(() => {
rmSync("/tmp/test_dir", { recursive: true });
});
}); });
import type { ServiceContext, StorageContext } from "llamaindex";
import {
Document,
VectorStoreIndex,
storageContextFromDefaults,
} from "llamaindex";
import { beforeAll, describe, expect, test } from "vitest";
import { mockServiceContext } from "../utility/mockServiceContext.js";
describe.sequential("VectorStoreIndex", () => {
  let serviceContext: ServiceContext;
  let storageContext: StorageContext;
  // Ingests the same single document `runs` times into one shared storage
  // context and returns the doc-store entry count after each run, so tests
  // can assert whether re-ingestion created duplicate entries.
  let testStrategy: (
    // strategy?: DocStoreStrategy,
    runs?: number,
  ) => Promise<Array<number>>;

  beforeAll(async () => {
    serviceContext = mockServiceContext();
    storageContext = await storageContextFromDefaults({
      persistDir: "/tmp/test_dir",
    });
    testStrategy = async (
      // strategy?: DocStoreStrategy,
      runs: number = 2,
    ): Promise<Array<number>> => {
      const documents = [new Document({ text: "lorem ipsem", id_: "1" })];
      const entries: number[] = [];
      for (let i = 0; i < runs; i++) {
        await VectorStoreIndex.fromDocuments(documents, {
          serviceContext,
          storageContext,
          // docStoreStrategy: strategy,
        });
        const docs = await storageContext.docStore.docs();
        entries.push(Object.keys(docs).length);
      }
      return entries;
    };
  });

  test("fromDocuments does not store duplicates by default", async () => {
    const entries = await testStrategy();
    // Ingesting the identical document twice must not grow the doc store.
    expect(entries[0]).toBe(entries[1]);
  });

  // test("fromDocuments ignores duplicates in upserts", async () => {
  //   const entries = await testStrategy(DocStoreStrategy.DUPLICATES_ONLY);
  //   expect(entries[0]).toBe(entries[1]);
  // });

  // afterAll(() => {
  //   rmSync("/tmp/test_dir", { recursive: true });
  // });
});
...@@ -2,40 +2,17 @@ import type { ServiceContext } from "llamaindex"; ...@@ -2,40 +2,17 @@ import type { ServiceContext } from "llamaindex";
import { import {
FunctionTool, FunctionTool,
ObjectIndex, ObjectIndex,
OpenAI,
OpenAIEmbedding,
SimpleToolNodeMapping, SimpleToolNodeMapping,
VectorStoreIndex, VectorStoreIndex,
serviceContextFromDefaults,
} from "llamaindex"; } from "llamaindex";
import { beforeAll, describe, expect, test, vi } from "vitest"; import { beforeAll, describe, expect, test } from "vitest";
import { import { mockServiceContext } from "../utility/mockServiceContext.js";
mockEmbeddingModel,
mockLlmGeneration,
} from "../utility/mockOpenAI.js";
vi.mock("llamaindex/llm/open_ai", () => {
return {
getOpenAISession: vi.fn().mockImplementation(() => null),
};
});
describe("ObjectIndex", () => { describe("ObjectIndex", () => {
let serviceContext: ServiceContext; let serviceContext: ServiceContext;
beforeAll(() => { beforeAll(() => {
const embeddingModel = new OpenAIEmbedding(); serviceContext = mockServiceContext();
const llm = new OpenAI();
mockEmbeddingModel(embeddingModel);
mockLlmGeneration({ languageModel: llm });
const ctx = serviceContextFromDefaults({
embedModel: embeddingModel,
llm,
});
serviceContext = ctx;
}); });
test("test_object_with_tools", async () => { test("test_object_with_tools", async () => {
......
import {
OpenAI,
OpenAIEmbedding,
serviceContextFromDefaults,
} from "llamaindex";
import {
mockEmbeddingModel,
mockLlmGeneration,
} from "../utility/mockOpenAI.js";
import { vi } from "vitest";
// Mock the OpenAI getOpenAISession function during testing so no real
// OpenAI session (or API key) is required. vitest hoists this call above
// the imports, so it must not reference any outer variables.
vi.mock("llamaindex/llm/open_ai", () => ({
  getOpenAISession: vi.fn().mockImplementation(() => null),
}));
export function mockServiceContext() {
const embeddingModel = new OpenAIEmbedding();
const llm = new OpenAI();
mockEmbeddingModel(embeddingModel);
mockLlmGeneration({ languageModel: llm });
return serviceContextFromDefaults({
embedModel: embeddingModel,
llm,
});
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment