diff --git a/packages/core/src/indices/summary/index.ts b/packages/core/src/indices/summary/index.ts
index 59d10ae4cd1f6815f46c183f56714f6af57ac2e8..d1be94e98377b235ab7b7474c0b52fb76b3d6e75 100644
--- a/packages/core/src/indices/summary/index.ts
+++ b/packages/core/src/indices/summary/index.ts
@@ -75,7 +75,8 @@ export class SummaryIndex extends BaseIndex<IndexList> {
     if (options.indexStruct) {
       indexStruct = options.indexStruct;
     } else if (indexStructs.length == 1) {
-      indexStruct = indexStructs[0];
+      indexStruct =
+        indexStructs[0].type === IndexStructType.LIST ? indexStructs[0] : null;
     } else if (indexStructs.length > 1 && options.indexId) {
       indexStruct = (await indexStore.getIndexStruct(
         options.indexId,
diff --git a/packages/core/src/indices/vectorStore/index.ts b/packages/core/src/indices/vectorStore/index.ts
index 199b07d91a7edcf94c59eb9886c228cb0931a108..ee7ee9ad822ee4c420e7952705f229c19c97af19 100644
--- a/packages/core/src/indices/vectorStore/index.ts
+++ b/packages/core/src/indices/vectorStore/index.ts
@@ -145,6 +145,9 @@ export class VectorStoreIndex extends BaseIndex<IndexDict> {
     if (options.indexStruct) {
       indexStruct = options.indexStruct;
     } else if (indexStructs.length == 1) {
-      indexStruct = indexStructs[0];
+      indexStruct =
+        indexStructs[0].type === IndexStructType.SIMPLE_DICT
+          ? indexStructs[0]
+          : undefined;
     } else if (indexStructs.length > 1 && options.indexId) {
       indexStruct = (await indexStore.getIndexStruct(
diff --git a/packages/core/tests/indices/SummaryIndex.test.ts b/packages/core/tests/indices/SummaryIndex.test.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6b87058fe61c0989062baea42af00e24b63629f0
--- /dev/null
+++ b/packages/core/tests/indices/SummaryIndex.test.ts
@@ -0,0 +1,58 @@
+import type { ServiceContext } from "llamaindex";
+import {
+  Document,
+  OpenAI,
+  OpenAIEmbedding,
+  SummaryIndex,
+  VectorStoreIndex,
+  serviceContextFromDefaults,
+  storageContextFromDefaults,
+} from "llamaindex";
+import { beforeAll, describe, expect, it, vi } from "vitest";
+import {
+  mockEmbeddingModel,
+  mockLlmGeneration,
+} from "../utility/mockOpenAI.js";
+
+// Mock the OpenAI getOpenAISession function during testing
+vi.mock("llamaindex/llm/open_ai", () => {
+  return {
+    getOpenAISession: vi.fn().mockImplementation(() => null),
+  };
+});
+
+describe("SummaryIndex", () => {
+  let serviceContext: ServiceContext;
+
+  beforeAll(() => {
+    const embeddingModel = new OpenAIEmbedding();
+    const llm = new OpenAI();
+
+    mockEmbeddingModel(embeddingModel);
+    mockLlmGeneration({ languageModel: llm });
+
+    const ctx = serviceContextFromDefaults({
+      embedModel: embeddingModel,
+      llm,
+    });
+
+    serviceContext = ctx;
+  });
+
+  it("SummaryIndex and VectorStoreIndex must be able to share the same storage context", async () => {
+    const storageContext = await storageContextFromDefaults({
+      persistDir: "/tmp/test_dir",
+    });
+    const documents = [new Document({ text: "lorem ipsem", id_: "1" })];
+    const vectorIndex = await VectorStoreIndex.fromDocuments(documents, {
+      serviceContext,
+      storageContext,
+    });
+    expect(vectorIndex).toBeDefined();
+    const summaryIndex = await SummaryIndex.fromDocuments(documents, {
+      serviceContext,
+      storageContext,
+    });
+    expect(summaryIndex).toBeDefined();
+  });
+});