diff --git a/packages/core/tests/StorageContext.test.ts b/packages/core/tests/StorageContext.test.ts
index c1702a90534907869f339d69873bc1329a49c981..bfbb4924ff75093701d39aa2efb6dc08763cfb47 100644
--- a/packages/core/tests/StorageContext.test.ts
+++ b/packages/core/tests/StorageContext.test.ts
@@ -1,21 +1,37 @@
-import { storageContextFromDefaults } from "llamaindex/storage/StorageContext";
+import {
+  storageContextFromDefaults,
+  type StorageContext,
+} from "llamaindex/storage/StorageContext";
 import { existsSync, rmSync } from "node:fs";
-import { describe, expect, test, vi, vitest } from "vitest";
+import {
+  afterAll,
+  beforeAll,
+  describe,
+  expect,
+  test,
+  vi,
+  vitest,
+} from "vitest";
 
 vitest.spyOn(console, "error");
 
 describe("StorageContext", () => {
-  test("initializes", async () => {
-    vi.mocked(console.error).mockImplementation(() => {}); // silence console.error
+  let storageContext: StorageContext;
 
-    const storageContext = await storageContextFromDefaults({
+  beforeAll(async () => {
+    storageContext = await storageContextFromDefaults({
       persistDir: "/tmp/test_dir",
     });
+  });
+
+  test("initializes", async () => {
+    vi.mocked(console.error).mockImplementation(() => {}); // silence console.error
 
     expect(existsSync("/tmp/test_dir")).toBe(true);
     expect(storageContext).toBeDefined();
+  });
 
-    // cleanup
+  afterAll(() => {
     rmSync("/tmp/test_dir", { recursive: true });
   });
 });
diff --git a/packages/core/tests/indices/SummaryIndex.test.ts b/packages/core/tests/indices/SummaryIndex.test.ts
index 6b87058fe61c0989062baea42af00e24b63629f0..846a61e9db4a1e1ce3a62ec062cfb4bcf07defd9 100644
--- a/packages/core/tests/indices/SummaryIndex.test.ts
+++ b/packages/core/tests/indices/SummaryIndex.test.ts
@@ -1,48 +1,27 @@
-import type { ServiceContext } from "llamaindex";
 import {
   Document,
-  OpenAI,
-  OpenAIEmbedding,
   SummaryIndex,
   VectorStoreIndex,
-  serviceContextFromDefaults,
   storageContextFromDefaults,
+  type ServiceContext,
+  type StorageContext,
 } from "llamaindex";
-import { beforeAll, describe, expect, it, vi } from "vitest";
-import {
-  mockEmbeddingModel,
-  mockLlmGeneration,
-} from "../utility/mockOpenAI.js";
-
-// Mock the OpenAI getOpenAISession function during testing
-vi.mock("llamaindex/llm/open_ai", () => {
-  return {
-    getOpenAISession: vi.fn().mockImplementation(() => null),
-  };
-});
+import { rmSync } from "node:fs";
+import { afterAll, beforeAll, describe, expect, it } from "vitest";
+import { mockServiceContext } from "../utility/mockServiceContext.js";
 
 describe("SummaryIndex", () => {
   let serviceContext: ServiceContext;
+  let storageContext: StorageContext;
 
-  beforeAll(() => {
-    const embeddingModel = new OpenAIEmbedding();
-    const llm = new OpenAI();
-
-    mockEmbeddingModel(embeddingModel);
-    mockLlmGeneration({ languageModel: llm });
-
-    const ctx = serviceContextFromDefaults({
-      embedModel: embeddingModel,
-      llm,
+  beforeAll(async () => {
+    serviceContext = mockServiceContext();
+    storageContext = await storageContextFromDefaults({
+      persistDir: "/tmp/test_dir",
     });
-
-    serviceContext = ctx;
   });
 
   it("SummaryIndex and VectorStoreIndex must be able to share the same storage context", async () => {
-    const storageContext = await storageContextFromDefaults({
-      persistDir: "/tmp/test_dir",
-    });
     const documents = [new Document({ text: "lorem ipsem", id_: "1" })];
     const vectorIndex = await VectorStoreIndex.fromDocuments(documents, {
       serviceContext,
@@ -55,4 +34,8 @@ describe("SummaryIndex", () => {
     });
     expect(summaryIndex).toBeDefined();
   });
+
+  afterAll(() => {
+    rmSync("/tmp/test_dir", { recursive: true });
+  });
 });
diff --git a/packages/core/tests/indices/VectorStoreIndex.test.ts b/packages/core/tests/indices/VectorStoreIndex.test.ts
new file mode 100644
index 0000000000000000000000000000000000000000..4e9f3694a563fb2a0a389788f9b2629e2bb2ed7b
--- /dev/null
+++ b/packages/core/tests/indices/VectorStoreIndex.test.ts
@@ -0,0 +1,55 @@
+import type { ServiceContext, StorageContext } from "llamaindex";
+import {
+  Document,
+  VectorStoreIndex,
+  storageContextFromDefaults,
+} from "llamaindex";
+import { beforeAll, describe, expect, test } from "vitest";
+import { mockServiceContext } from "../utility/mockServiceContext.js";
+
+describe.sequential("VectorStoreIndex", () => {
+  let serviceContext: ServiceContext;
+  let storageContext: StorageContext;
+  let testStrategy: (
+    // strategy?: DocStoreStrategy,
+    runs?: number,
+  ) => Promise<Array<number>>;
+
+  beforeAll(async () => {
+    serviceContext = mockServiceContext();
+    storageContext = await storageContextFromDefaults({
+      persistDir: "/tmp/test_dir",
+    });
+    testStrategy = async (
+      // strategy?: DocStoreStrategy,
+      runs: number = 2,
+    ): Promise<Array<number>> => {
+      const documents = [new Document({ text: "lorem ipsem", id_: "1" })];
+      const entries = [];
+      for (let i = 0; i < runs; i++) {
+        await VectorStoreIndex.fromDocuments(documents, {
+          serviceContext,
+          storageContext,
+          // docStoreStrategy: strategy,
+        });
+        const docs = await storageContext.docStore.docs();
+        entries.push(Object.keys(docs).length);
+      }
+      return entries;
+    };
+  });
+
+  test("fromDocuments does not store duplicates by default", async () => {
+    const entries = await testStrategy();
+    expect(entries[0]).toBe(entries[1]);
+  });
+
+  // test("fromDocuments ignores duplicates in upserts", async () => {
+  //   const entries = await testStrategy(DocStoreStrategy.DUPLICATES_ONLY);
+  //   expect(entries[0]).toBe(entries[1]);
+  // });
+
+  // afterAll(() => {
+  //   rmSync("/tmp/test_dir", { recursive: true });
+  // });
+});
diff --git a/packages/core/tests/objects/ObjectIndex.test.ts b/packages/core/tests/objects/ObjectIndex.test.ts
index b65e767650c4fd545ae00fb460fdfcc2c788c927..08203f50fd98d504b33c1a4a49b58d1dca7da963 100644
--- a/packages/core/tests/objects/ObjectIndex.test.ts
+++ b/packages/core/tests/objects/ObjectIndex.test.ts
@@ -2,40 +2,17 @@ import type { ServiceContext } from "llamaindex";
 import {
   FunctionTool,
   ObjectIndex,
-  OpenAI,
-  OpenAIEmbedding,
   SimpleToolNodeMapping,
   VectorStoreIndex,
-  serviceContextFromDefaults,
 } from "llamaindex";
-import { beforeAll, describe, expect, test, vi } from "vitest";
-import {
-  mockEmbeddingModel,
-  mockLlmGeneration,
-} from "../utility/mockOpenAI.js";
-
-vi.mock("llamaindex/llm/open_ai", () => {
-  return {
-    getOpenAISession: vi.fn().mockImplementation(() => null),
-  };
-});
+import { beforeAll, describe, expect, test } from "vitest";
+import { mockServiceContext } from "../utility/mockServiceContext.js";
 
 describe("ObjectIndex", () => {
   let serviceContext: ServiceContext;
 
   beforeAll(() => {
-    const embeddingModel = new OpenAIEmbedding();
-    const llm = new OpenAI();
-
-    mockEmbeddingModel(embeddingModel);
-    mockLlmGeneration({ languageModel: llm });
-
-    const ctx = serviceContextFromDefaults({
-      embedModel: embeddingModel,
-      llm,
-    });
-
-    serviceContext = ctx;
+    serviceContext = mockServiceContext();
   });
 
   test("test_object_with_tools", async () => {
diff --git a/packages/core/tests/utility/mockServiceContext.ts b/packages/core/tests/utility/mockServiceContext.ts
new file mode 100644
index 0000000000000000000000000000000000000000..03b0893b82a50216f40f280516a4a213ae8f8134
--- /dev/null
+++ b/packages/core/tests/utility/mockServiceContext.ts
@@ -0,0 +1,32 @@
+import {
+  OpenAI,
+  OpenAIEmbedding,
+  serviceContextFromDefaults,
+} from "llamaindex";
+
+import {
+  mockEmbeddingModel,
+  mockLlmGeneration,
+} from "./mockOpenAI.js";
+
+import { vi } from "vitest";
+
+// Mock the OpenAI getOpenAISession function during testing
+vi.mock("llamaindex/llm/open_ai", () => {
+  return {
+    getOpenAISession: vi.fn().mockImplementation(() => null),
+  };
+});
+
+export function mockServiceContext() {
+  const embeddingModel = new OpenAIEmbedding();
+  const llm = new OpenAI();
+
+  mockEmbeddingModel(embeddingModel);
+  mockLlmGeneration({ languageModel: llm });
+
+  return serviceContextFromDefaults({
+    embedModel: embeddingModel,
+    llm,
+  });
+}