diff --git a/packages/autotool/examples/01_node/src/index.ts b/packages/autotool/examples/01_node/src/index.ts
index 04a310b275e422bd855c56238d3ff1e556857e17..7a8e3d857077b45ebe36d69ff1a79912c6cec8fe 100644
--- a/packages/autotool/examples/01_node/src/index.ts
+++ b/packages/autotool/examples/01_node/src/index.ts
@@ -16,7 +16,7 @@ const openai = new OpenAI();
     stream: false,
   });
 
-  const toolCalls = response.choices[0].message.tool_calls ?? [];
+  const toolCalls = response.choices[0]!.message.tool_calls ?? [];
   for (const toolCall of toolCalls) {
     toolCall.function.name;
   }
diff --git a/packages/autotool/src/index.ts b/packages/autotool/src/index.ts
index 19f546bb62dd888aea23b02ee2255acc5b4fd20b..837e66439fd6a71bb675761367c0fc67d25f4af8 100644
--- a/packages/autotool/src/index.ts
+++ b/packages/autotool/src/index.ts
@@ -16,11 +16,16 @@ const openaiToolsAtom = atom<ChatCompletionTool[]>((get) => {
   const metadata = get(toolMetadataAtom);
   return metadata.map(([metadata]) => ({
     type: "function",
-    function: {
-      parameters: metadata.parameters,
-      name: metadata.name,
-      description: metadata.description,
-    },
+    function: metadata.parameters
+      ? {
+          parameters: metadata.parameters,
+          name: metadata.name,
+          description: metadata.description,
+        }
+      : {
+          name: metadata.name,
+          description: metadata.description,
+        },
   }));
 });
 
diff --git a/packages/autotool/src/internal/index.ts b/packages/autotool/src/internal/index.ts
index b05b6527ac9b7513ee2a2055e5ea0ba881c8945a..592aa61f0dda27e808f420b16aa4bb57f80f3307 100644
--- a/packages/autotool/src/internal/index.ts
+++ b/packages/autotool/src/internal/index.ts
@@ -17,7 +17,7 @@ export type Info = {
  * @internal
  */
 export type InfoString = {
-  originalFunction?: string;
+  originalFunction: string | undefined;
   parameterMapping: Record<string, number>;
 };
 
diff --git a/packages/cloud/openapi-ts.config.ts b/packages/cloud/openapi-ts.config.ts
index 97dc9e0c287b2b072f52498b89e49ef672f1c2d5..af78eba9383a92249694b52ad7d244575f2306c6 100644
--- a/packages/cloud/openapi-ts.config.ts
+++ b/packages/cloud/openapi-ts.config.ts
@@ -4,6 +4,7 @@ export default defineConfig({
   // you can download this file to get the latest version of the OpenAPI document
   // @link https://api.cloud.llamaindex.ai/api/openapi.json
   input: "./openapi.json",
+  client: "@hey-api/client-fetch",
   output: {
     path: "./src/client",
     format: "prettier",
diff --git a/packages/cloud/package.json b/packages/cloud/package.json
index a07737a111be30243ab200b7005a152f3fe83dfe..bc86a4e775110352e88d51bfcb996d8ffed43049 100644
--- a/packages/cloud/package.json
+++ b/packages/cloud/package.json
@@ -4,7 +4,7 @@
   "type": "module",
   "license": "MIT",
   "scripts": {
-    "generate": "pnpm dlx @hey-api/openapi-ts@0.49.0",
+    "generate": "pnpm dlx @hey-api/openapi-ts@0.53.0",
     "build": "pnpm run generate && bunchee"
   },
   "files": [
@@ -34,7 +34,8 @@
     "directory": "packages/cloud"
   },
   "devDependencies": {
-    "@hey-api/openapi-ts": "^0.52.11",
+    "@hey-api/client-fetch": "^0.2.4",
+    "@hey-api/openapi-ts": "^0.53.0",
     "bunchee": "5.3.2"
   }
 }
diff --git a/packages/community/src/llm/bedrock/anthropic/utils.ts b/packages/community/src/llm/bedrock/anthropic/utils.ts
index 2d59260321a4f3fd6f2103e21afd491e264a64c5..372443694662590c016228eaadbadfe620927943 100644
--- a/packages/community/src/llm/bedrock/anthropic/utils.ts
+++ b/packages/community/src/llm/bedrock/anthropic/utils.ts
@@ -28,9 +28,9 @@ export const mergeNeighboringSameRoleMessages = (
 ): AnthropicMessage[] => {
   return messages.reduce(
     (result: AnthropicMessage[], current: AnthropicMessage, index: number) => {
-      if (index > 0 && messages[index - 1].role === current.role) {
-        result[result.length - 1].content = [
-          ...result[result.length - 1].content,
+      if (index > 0 && messages[index - 1]!.role === current.role) {
+        result[result.length - 1]!.content = [
+          ...result[result.length - 1]!.content,
           ...current.content,
         ];
       } else {
@@ -128,7 +128,7 @@ export const mapChatMessagesToAnthropicMessages = <
       );
     })
     .filter((message: AnthropicMessage) => {
-      const content = message.content[0];
+      const content = message.content[0]!;
       if (content.type === "text" && !content.text) return false;
       if (content.type === "image" && !content.source.data) return false;
       if (content.type === "image" && message.role === "assistant")
@@ -151,12 +151,12 @@ export const extractDataUrlComponents = (
 } => {
   const parts = dataUrl.split(";base64,");
 
-  if (parts.length !== 2 || !parts[0].startsWith("data:")) {
+  if (parts.length !== 2 || !parts[0]!.startsWith("data:")) {
     throw new Error("Invalid data URL");
   }
 
-  const mimeType = parts[0].slice(5);
-  const base64 = parts[1];
+  const mimeType = parts[0]!.slice(5);
+  const base64 = parts[1]!;
 
   return {
     mimeType,
diff --git a/packages/community/src/llm/bedrock/index.ts b/packages/community/src/llm/bedrock/index.ts
index c0a110dd5117edbdafafbdd85dd73e31692b6150..2c8ab79aa81ea649aad696152be95ab0b0a29eb9 100644
--- a/packages/community/src/llm/bedrock/index.ts
+++ b/packages/community/src/llm/bedrock/index.ts
@@ -153,12 +153,15 @@ export const TOOL_CALL_MODELS = [
 
 const getProvider = (model: string): Provider => {
   const providerName = model.split(".")[0];
+  if (!providerName) {
+    throw new Error(`Model ${model} is not supported`);
+  }
   if (!(providerName in PROVIDERS)) {
     throw new Error(
       `Provider ${providerName} for model ${model} is not supported`,
     );
   }
-  return PROVIDERS[providerName];
+  return PROVIDERS[providerName]!;
 };
 
 export type BedrockModelParams = {
diff --git a/packages/community/src/llm/bedrock/meta/provider.ts b/packages/community/src/llm/bedrock/meta/provider.ts
index 5b7f78ac8373f2553bde56da3cfee8fbe5fd57fc..fb1875a6c0793a4a3ecafc13660831947adbb3b5 100644
--- a/packages/community/src/llm/bedrock/meta/provider.ts
+++ b/packages/community/src/llm/bedrock/meta/provider.ts
@@ -34,7 +34,7 @@ export class MetaProvider extends Provider<MetaStreamEvent> {
     const result = this.getResultFromResponse(response);
     if (!result.generation.trim().startsWith(TOKENS.TOOL_CALL)) return [];
     const tool = JSON.parse(
-      result.generation.trim().split(TOKENS.TOOL_CALL)[1],
+      result.generation.trim().split(TOKENS.TOOL_CALL)[1]!,
     );
     return [
       {
diff --git a/packages/core/src/embeddings/base.ts b/packages/core/src/embeddings/base.ts
index 7ce0645fcc43faf914420505ac287cc04eef9021..d8f66d215ca98991b200a9cb18939dfb662faf62 100644
--- a/packages/core/src/embeddings/base.ts
+++ b/packages/core/src/embeddings/base.ts
@@ -34,7 +34,7 @@ export abstract class BaseEmbedding extends TransformComponent {
         const embeddings = await this.getTextEmbeddingsBatch(texts, options);
 
         for (let i = 0; i < nodes.length; i++) {
-          nodes[i].embedding = embeddings[i];
+          nodes[i]!.embedding = embeddings[i];
         }
 
         return nodes;
@@ -120,7 +120,7 @@ export async function batchEmbeddings<T>(
   const curBatch: T[] = [];
 
   for (let i = 0; i < queue.length; i++) {
-    curBatch.push(queue[i]);
+    curBatch.push(queue[i]!);
     if (i == queue.length - 1 || curBatch.length == chunkSize) {
       const embeddings = await embedFunc(curBatch);
 
diff --git a/packages/core/src/embeddings/utils.ts b/packages/core/src/embeddings/utils.ts
index c6c439815c2d9510e79f5ddf6052006f7b0318c3..0e292d2556de76ef0ec1eafa657fac0d3932c791 100644
--- a/packages/core/src/embeddings/utils.ts
+++ b/packages/core/src/embeddings/utils.ts
@@ -35,20 +35,20 @@ export function similarity(
   function norm(x: number[]): number {
     let result = 0;
     for (let i = 0; i < x.length; i++) {
-      result += x[i] * x[i];
+      result += x[i]! * x[i]!;
     }
     return Math.sqrt(result);
   }
 
   switch (mode) {
     case SimilarityType.EUCLIDEAN: {
-      const difference = embedding1.map((x, i) => x - embedding2[i]);
+      const difference = embedding1.map((x, i) => x - embedding2[i]!);
       return -norm(difference);
     }
     case SimilarityType.DOT_PRODUCT: {
       let result = 0;
       for (let i = 0; i < embedding1.length; i++) {
-        result += embedding1[i] * embedding2[i];
+        result += embedding1[i]! * embedding2[i]!;
       }
       return result;
     }
diff --git a/packages/core/src/indices/prompt-helper.ts b/packages/core/src/indices/prompt-helper.ts
index 473885efd9f4006bf64dd4756b8f6f4debbb32d4..0ae144ae0ebe0a3b072063a0ecb6905867985310 100644
--- a/packages/core/src/indices/prompt-helper.ts
+++ b/packages/core/src/indices/prompt-helper.ts
@@ -23,12 +23,12 @@ function getEmptyPromptTxt(prompt: PromptTemplate) {
  * Get biggest empty prompt size from a list of prompts.
  * Used to calculate the maximum size of inputs to the LLM.
  */
-export function getBiggestPrompt(prompts: PromptTemplate[]) {
+export function getBiggestPrompt(prompts: PromptTemplate[]): PromptTemplate {
   const emptyPromptTexts = prompts.map(getEmptyPromptTxt);
   const emptyPromptLengths = emptyPromptTexts.map((text) => text.length);
   const maxEmptyPromptLength = Math.max(...emptyPromptLengths);
   const maxEmptyPromptIndex = emptyPromptLengths.indexOf(maxEmptyPromptLength);
-  return prompts[maxEmptyPromptIndex];
+  return prompts[maxEmptyPromptIndex]!;
 }
 
 export type PromptHelperOptions = {
@@ -47,7 +47,7 @@ export class PromptHelper {
   contextWindow = DEFAULT_CONTEXT_WINDOW;
   numOutput = DEFAULT_NUM_OUTPUTS;
   chunkOverlapRatio = DEFAULT_CHUNK_OVERLAP_RATIO;
-  chunkSizeLimit?: number;
+  chunkSizeLimit: number | undefined;
   tokenizer: Tokenizer;
   separator = " ";
 
diff --git a/packages/core/src/llms/type.ts b/packages/core/src/llms/type.ts
index cffe627f8ebdc6dd08af32c836f31ee1a6f91741..b7c05f35e44752fd496eecafc96b23c56881f4ce 100644
--- a/packages/core/src/llms/type.ts
+++ b/packages/core/src/llms/type.ts
@@ -103,7 +103,7 @@ export type LLMMetadata = {
   model: string;
   temperature: number;
   topP: number;
-  maxTokens?: number;
+  maxTokens?: number | undefined;
   contextWindow: number;
   tokenizer: Tokenizers | undefined;
 };
@@ -141,7 +141,7 @@ export interface LLMCompletionParamsStreaming extends LLMCompletionParamsBase {
 
 export interface LLMCompletionParamsNonStreaming
   extends LLMCompletionParamsBase {
-  stream?: false | null;
+  stream?: false | null | undefined;
 }
 
 export type MessageContentTextDetail = {
diff --git a/packages/core/src/node-parser/base.ts b/packages/core/src/node-parser/base.ts
index b4eb9b5ba5f33b83af0c0285fa733f5e5c87f8df..5a9943ac28ad71e6012ebbf3435bd166279a2ddb 100644
--- a/packages/core/src/node-parser/base.ts
+++ b/packages/core/src/node-parser/base.ts
@@ -122,7 +122,7 @@ export abstract class MetadataAwareTextSplitter extends TextSplitter {
       throw new TypeError("`texts` and `metadata` must have the same length");
     }
     return texts.flatMap((text, i) =>
-      this.splitTextMetadataAware(text, metadata[i]),
+      this.splitTextMetadataAware(text, metadata[i]!),
     );
   }
 
diff --git a/packages/core/src/node-parser/markdown.ts b/packages/core/src/node-parser/markdown.ts
index c082c6e6c6f8d380f045f2e25dfb1f200a34ad43..12c08294438d98d6259c5af3ff4bb7eb1357e81d 100644
--- a/packages/core/src/node-parser/markdown.ts
+++ b/packages/core/src/node-parser/markdown.ts
@@ -35,8 +35,8 @@ export class MarkdownNodeParser extends NodeParser {
         }
         metadata = this.updateMetadata(
           metadata,
-          headerMatch[2],
-          headerMatch[1].trim().length,
+          headerMatch[2]!,
+          headerMatch[1]!.trim().length,
         );
         currentSection = `${headerMatch[2]}\n`;
       } else {
@@ -63,7 +63,7 @@ export class MarkdownNodeParser extends NodeParser {
     for (let i = 1; i < newHeaderLevel; i++) {
       const key = `Header_${i}`;
       if (key in headersMetadata) {
-        updatedHeaders[key] = headersMetadata[key];
+        updatedHeaders[key] = headersMetadata[key]!;
       }
     }
 
@@ -76,10 +76,10 @@ export class MarkdownNodeParser extends NodeParser {
     node: TextNode,
     metadata: Metadata,
   ): TextNode {
-    const newNode = buildNodeFromSplits([textSplit], node, undefined)[0];
+    const newNode = buildNodeFromSplits([textSplit], node, undefined)[0]!;
 
     if (this.includeMetadata) {
-      newNode.metadata = { ...newNode.metadata, ...metadata };
+      newNode.metadata = { ...newNode.metadata, ...metadata };
     }
 
     return newNode;
diff --git a/packages/core/src/node-parser/sentence-splitter.ts b/packages/core/src/node-parser/sentence-splitter.ts
index 7091bcfd7d6db1ae30cae595d8f709070e324864..49c9fe241f502267be9736cd2541b800554f5011 100644
--- a/packages/core/src/node-parser/sentence-splitter.ts
+++ b/packages/core/src/node-parser/sentence-splitter.ts
@@ -168,9 +168,9 @@ export class SentenceSplitter extends MetadataAwareTextSplitter {
       let lastIndex = lastChunk.length - 1;
       while (
         lastIndex >= 0 &&
-        currentChunkLength + lastChunk[lastIndex][1] <= this.chunkOverlap
+        currentChunkLength + lastChunk[lastIndex]![1] <= this.chunkOverlap
       ) {
-        const [text, length] = lastChunk[lastIndex];
+        const [text, length] = lastChunk[lastIndex]!;
         currentChunkLength += length;
         currentChunk.unshift([text, length]);
         lastIndex -= 1;
@@ -178,7 +178,7 @@ export class SentenceSplitter extends MetadataAwareTextSplitter {
     };
 
     while (splits.length > 0) {
-      const curSplit = splits[0];
+      const curSplit = splits[0]!;
       if (curSplit.tokenSize > chunkSize) {
         throw new Error("Single token exceeded chunk size");
       }
diff --git a/packages/core/src/node-parser/utils.ts b/packages/core/src/node-parser/utils.ts
index 5f31b23162c04a22ddf0b7ba1fe304081103f76b..eb5a7cbf32a1fb17c497cd83a84d50fb364fecba 100644
--- a/packages/core/src/node-parser/utils.ts
+++ b/packages/core/src/node-parser/utils.ts
@@ -5,7 +5,7 @@ export type TextSplitterFn = (text: string) => string[];
 
 const truncateText = (text: string, textSplitter: TextSplitter): string => {
   const chunks = textSplitter.splitText(text);
-  return chunks[0];
+  return chunks[0] ?? text;
 };
 
 const splitTextKeepSeparator = (text: string, separator: string): string[] => {
diff --git a/packages/core/src/prompts/base.ts b/packages/core/src/prompts/base.ts
index 40312f5ac92639de707a2723447552924748d64d..0ec65ba3791d0f0a0e9a018d5ce8adcc5a4e157d 100644
--- a/packages/core/src/prompts/base.ts
+++ b/packages/core/src/prompts/base.ts
@@ -18,7 +18,7 @@ export type BasePromptTemplateOptions<
     // loose type for better type inference
     | readonly string[];
   options?: Partial<Record<TemplatesVar[number] | (string & {}), string>>;
-  outputParser?: BaseOutputParser;
+  outputParser?: BaseOutputParser | undefined;
   templateVarMappings?: Partial<
     Record<Vars[number] | (string & {}), TemplatesVar[number] | (string & {})>
   >;
@@ -34,7 +34,7 @@ export abstract class BasePromptTemplate<
   metadata: Metadata = {};
   templateVars: Set<string> = new Set();
   options: Partial<Record<TemplatesVar[number] | (string & {}), string>> = {};
-  outputParser?: BaseOutputParser;
+  outputParser: BaseOutputParser | undefined;
   templateVarMappings: Partial<
     Record<Vars[number] | (string & {}), TemplatesVar[number] | (string & {})>
   > = {};
diff --git a/packages/core/src/prompts/mixin.ts b/packages/core/src/prompts/mixin.ts
index 76c9dd7239af5c401e5c86d337d240c2d78f7fff..0fe3782732a835367c59537b648c101c8bfde9d9 100644
--- a/packages/core/src/prompts/mixin.ts
+++ b/packages/core/src/prompts/mixin.ts
@@ -45,21 +45,21 @@ export abstract class PromptMixin {
 
     for (const key in prompts) {
       if (key.includes(":")) {
-        const [module_name, sub_key] = key.split(":");
+        const [moduleName, subKey] = key.split(":") as [string, string];
 
-        if (!subPrompt[module_name]) {
-          subPrompt[module_name] = {};
+        if (!subPrompt[moduleName]) {
+          subPrompt[moduleName] = {};
         }
-        subPrompt[module_name][sub_key] = prompts[key];
+        subPrompt[moduleName][subKey] = prompts[key]!;
       }
     }
 
-    for (const [module_name, subPromptDict] of Object.entries(subPrompt)) {
-      if (!promptModules[module_name]) {
-        throw new Error(`Module ${module_name} not found.`);
+    for (const [moduleName, subPromptDict] of Object.entries(subPrompt)) {
+      if (!promptModules[moduleName]) {
+        throw new Error(`Module ${moduleName} not found.`);
       }
 
-      const moduleToUpdate = promptModules[module_name];
+      const moduleToUpdate = promptModules[moduleName];
 
       moduleToUpdate.updatePrompts(subPromptDict);
     }
diff --git a/packages/core/src/schema/node.ts b/packages/core/src/schema/node.ts
index 50ab9a973324f9826a89ac2780faf4ea5c735ab6..cf43dd184f027f803c4ff0f988f7f8aac115725b 100644
--- a/packages/core/src/schema/node.ts
+++ b/packages/core/src/schema/node.ts
@@ -38,13 +38,15 @@ export type RelatedNodeType<T extends Metadata = Metadata> =
   | RelatedNodeInfo<T>[];
 
 export type BaseNodeParams<T extends Metadata = Metadata> = {
-  id_?: string;
-  metadata?: T;
-  excludedEmbedMetadataKeys?: string[];
-  excludedLlmMetadataKeys?: string[];
-  relationships?: Partial<Record<NodeRelationship, RelatedNodeType<T>>>;
-  hash?: string;
-  embedding?: number[];
+  id_?: string | undefined;
+  metadata?: T | undefined;
+  excludedEmbedMetadataKeys?: string[] | undefined;
+  excludedLlmMetadataKeys?: string[] | undefined;
+  relationships?:
+    | Partial<Record<NodeRelationship, RelatedNodeType<T>>>
+    | undefined;
+  hash?: string | undefined;
+  embedding?: number[] | undefined;
 };
 
 /**
@@ -58,7 +60,7 @@ export abstract class BaseNode<T extends Metadata = Metadata> {
    * Set to a UUID by default.
    */
   id_: string;
-  embedding?: number[];
+  embedding: number[] | undefined;
 
   // Metadata fields
   metadata: T;
@@ -198,11 +200,11 @@ export abstract class BaseNode<T extends Metadata = Metadata> {
 
 export type TextNodeParams<T extends Metadata = Metadata> =
   BaseNodeParams<T> & {
-    text?: string;
-    textTemplate?: string;
-    startCharIdx?: number;
-    endCharIdx?: number;
-    metadataSeparator?: string;
+    text?: string | undefined;
+    textTemplate?: string | undefined;
+    startCharIdx?: number | undefined;
+    endCharIdx?: number | undefined;
+    metadataSeparator?: string | undefined;
   };
 
 /**
@@ -418,7 +420,7 @@ export class ImageDocument<T extends Metadata = Metadata> extends ImageNode<T> {
  */
 export interface NodeWithScore<T extends Metadata = Metadata> {
   node: BaseNode<T>;
-  score?: number;
+  score?: number | undefined;
 }
 
 export enum ModalityType {
diff --git "a/packages/core/src/schema/type/engine\342\200\223response.ts" "b/packages/core/src/schema/type/engine\342\200\223response.ts"
index c2de9e5149fab60c11b8e2b36dba0590b58a4d74..4f7086414ac27db73fc4b5752663ec1fc680bafa 100644
--- "a/packages/core/src/schema/type/engine\342\200\223response.ts"
+++ "b/packages/core/src/schema/type/engine\342\200\223response.ts"
@@ -3,7 +3,7 @@ import { extractText } from "../../utils";
 import type { Metadata, NodeWithScore } from "../node";
 
 export class EngineResponse implements ChatResponse, ChatResponseChunk {
-  sourceNodes?: NodeWithScore[];
+  sourceNodes: NodeWithScore[] | undefined;
 
   metadata: Metadata = {};
 
diff --git a/packages/core/src/utils/llms.ts b/packages/core/src/utils/llms.ts
index 664e1ff3a323dd4ea3aec3136ea7592d37562cef..1620477e1b36e33d4a9d1d478a8c80d4eb2c8f73 100644
--- a/packages/core/src/utils/llms.ts
+++ b/packages/core/src/utils/llms.ts
@@ -74,12 +74,12 @@ export const extractDataUrlComponents = (
 } => {
   const parts = dataUrl.split(";base64,");
 
-  if (parts.length !== 2 || !parts[0].startsWith("data:")) {
+  if (parts.length !== 2 || !parts[0]!.startsWith("data:")) {
     throw new Error("Invalid data URL");
   }
 
-  const mimeType = parts[0].slice(5);
-  const base64 = parts[1];
+  const mimeType = parts[0]!.slice(5);
+  const base64 = parts[1]!;
 
   return {
     mimeType,
diff --git a/packages/core/tests/node-parser/markdown.test.ts b/packages/core/tests/node-parser/markdown.test.ts
index 4ebea6411552801ed4894849d5b65c7425ea7da9..35c5c63a66a2df3169afb1e5e9aae23ea72bec94 100644
--- a/packages/core/tests/node-parser/markdown.test.ts
+++ b/packages/core/tests/node-parser/markdown.test.ts
@@ -19,12 +19,12 @@ Header 2 content
     ]);
 
     expect(splits.length).toBe(2);
-    expect(splits[0].metadata).toEqual({ Header_1: "Main Header" });
-    expect(splits[1].metadata).toEqual({ Header_1: "Header 2" });
-    expect(splits[0].getContent(MetadataMode.NONE)).toStrictEqual(
+    expect(splits[0]!.metadata).toEqual({ Header_1: "Main Header" });
+    expect(splits[1]!.metadata).toEqual({ Header_1: "Header 2" });
+    expect(splits[0]!.getContent(MetadataMode.NONE)).toStrictEqual(
       "Main Header\n\nHeader 1 content",
     );
-    expect(splits[1].getContent(MetadataMode.NONE)).toStrictEqual(
+    expect(splits[1]!.getContent(MetadataMode.NONE)).toStrictEqual(
       "Header 2\nHeader 2 content",
     );
   });
@@ -89,16 +89,16 @@ Content
       }),
     ]);
     expect(splits.length).toBe(4);
-    expect(splits[0].metadata).toEqual({ Header_1: "Main Header" });
-    expect(splits[1].metadata).toEqual({
+    expect(splits[0]!.metadata).toEqual({ Header_1: "Main Header" });
+    expect(splits[1]!.metadata).toEqual({
       Header_1: "Main Header",
       Header_2: "Sub-header",
     });
-    expect(splits[2].metadata).toEqual({
+    expect(splits[2]!.metadata).toEqual({
       Header_1: "Main Header",
       Header_2: "Sub-header",
       Header_3: "Sub-sub header",
     });
-    expect(splits[3].metadata).toEqual({ Header_1: "New title" });
+    expect(splits[3]!.metadata).toEqual({ Header_1: "New title" });
   });
 });
diff --git a/packages/core/tests/node-parser/sentence-spiller.test.ts b/packages/core/tests/node-parser/sentence-spiller.test.ts
index d29bbee2c3477d77c1c1b076be4753db10f045cf..60c7931e0c89e5f2434cdd776ff7bb04fa2b0b16 100644
--- a/packages/core/tests/node-parser/sentence-spiller.test.ts
+++ b/packages/core/tests/node-parser/sentence-spiller.test.ts
@@ -22,7 +22,7 @@ describe("SentenceSplitter", () => {
     });
     const result = sentenceSplitter.getNodesFromDocuments([doc]);
     expect(result.length).toEqual(1);
-    const node = result[0];
+    const node = result[0]!;
     // check not the same object
     expect(node.metadata).not.toBe(doc.metadata);
     expect(node.excludedLlmMetadataKeys).not.toBe(doc.excludedLlmMetadataKeys);
diff --git a/packages/llamaindex/e2e/fixtures/embeddings/OpenAIEmbedding.ts b/packages/llamaindex/e2e/fixtures/embeddings/OpenAIEmbedding.ts
index c93aefd4e459f45267cbbb15d77782a5edca1b13..2ff111e2e05b4532a924c5184f558557df6c33f7 100644
--- a/packages/llamaindex/e2e/fixtures/embeddings/OpenAIEmbedding.ts
+++ b/packages/llamaindex/e2e/fixtures/embeddings/OpenAIEmbedding.ts
@@ -11,7 +11,7 @@ export class OpenAIEmbedding
   extends TransformComponent
   implements BaseEmbedding
 {
-  embedInfo?: EmbeddingInfo | undefined;
+  embedInfo?: EmbeddingInfo;
   embedBatchSize = 512;
 
   constructor() {
diff --git a/packages/llamaindex/e2e/fixtures/llm/openai.ts b/packages/llamaindex/e2e/fixtures/llm/openai.ts
index 46ceb0ca9a75772dbee7aa04ec794160a46fdcd4..2b54919f5e5ad7f70762770886a3f4d7ead2fd66 100644
--- a/packages/llamaindex/e2e/fixtures/llm/openai.ts
+++ b/packages/llamaindex/e2e/fixtures/llm/openai.ts
@@ -48,8 +48,8 @@ export class OpenAI implements LLM {
         llmCompleteMockStorage.llmEventStart.shift()!["messages"];
       strictEqual(params.messages.length, chatMessage.length);
       for (let i = 0; i < chatMessage.length; i++) {
-        strictEqual(params.messages[i].role, chatMessage[i].role);
-        deepStrictEqual(params.messages[i].content, chatMessage[i].content);
+        strictEqual(params.messages[i]!.role, chatMessage[i]!.role);
+        deepStrictEqual(params.messages[i]!.content, chatMessage[i]!.content);
       }
 
       if (llmCompleteMockStorage.llmEventEnd.length > 0) {
@@ -64,7 +64,7 @@ export class OpenAI implements LLM {
                 if (idx === -1) {
                   break;
                 }
-                const chunk = llmCompleteMockStorage.llmEventStream[idx].chunk;
+                const chunk = llmCompleteMockStorage.llmEventStream[idx]!.chunk;
                 llmCompleteMockStorage.llmEventStream.splice(idx, 1);
                 yield chunk;
               }
@@ -90,8 +90,8 @@ export class OpenAI implements LLM {
       const chatMessage =
         llmCompleteMockStorage.llmEventStart.shift()!["messages"];
       strictEqual(1, chatMessage.length);
-      strictEqual("user", chatMessage[0].role);
-      strictEqual(params.prompt, chatMessage[0].content);
+      strictEqual("user", chatMessage[0]!.role);
+      strictEqual(params.prompt, chatMessage[0]!.content);
     }
     if (llmCompleteMockStorage.llmEventEnd.length > 0) {
       const response = llmCompleteMockStorage.llmEventEnd.shift()!["response"];
diff --git a/packages/llamaindex/e2e/node/openai.e2e.ts b/packages/llamaindex/e2e/node/openai.e2e.ts
index 63f0a1e3fd812542c4fbfb8cfc84fa350d351eef..4019390854c1059306639e61b99a542a695695c1 100644
--- a/packages/llamaindex/e2e/node/openai.e2e.ts
+++ b/packages/llamaindex/e2e/node/openai.e2e.ts
@@ -163,11 +163,11 @@ For questions about more specific sections, please use the vector_tool.`,
     }),
   ];
 
-  const originalCall = queryEngineTools[1].call.bind(queryEngineTools[1]);
+  const originalCall = queryEngineTools[1]!.call.bind(queryEngineTools[1]!);
   const mockCall = t.mock.fn(({ query }: { query: string }) => {
     return originalCall({ query });
   });
-  queryEngineTools[1].call = mockCall;
+  queryEngineTools[1]!.call = mockCall;
 
   const toolMapping = SimpleToolNodeMapping.fromObjects(queryEngineTools);
 
diff --git a/packages/llamaindex/src/ChatHistory.ts b/packages/llamaindex/src/ChatHistory.ts
index 16a9f27562e7d6c5d49742b20448c780bb69166d..f681e5261eb88762305a8ec78c9c65d1c26d86f7 100644
--- a/packages/llamaindex/src/ChatHistory.ts
+++ b/packages/llamaindex/src/ChatHistory.ts
@@ -42,7 +42,7 @@ export class SimpleChatHistory extends ChatHistory {
   messages: ChatMessage[];
   private messagesBefore: number;
 
-  constructor(init?: Partial<SimpleChatHistory>) {
+  constructor(init?: { messages?: ChatMessage[] | undefined }) {
     super();
     this.messages = init?.messages ?? [];
     this.messagesBefore = this.messages.length;
@@ -118,7 +118,7 @@ export class SummaryChatHistory extends ChatHistory {
       // remove oldest message until the chat history is short enough for the context window
       messagesToSummarize.shift();
     } while (
-      this.tokenizer.encode(promptMessages[0].content).length >
+      this.tokenizer.encode(promptMessages[0]!.content).length >
       this.tokensToSummarize
     );
 
@@ -146,7 +146,7 @@ export class SummaryChatHistory extends ChatHistory {
 
   public getLastSummary(): ChatMessage | null {
     const lastSummaryIndex = this.getLastSummaryIndex();
-    return lastSummaryIndex ? this.messages[lastSummaryIndex] : null;
+    return lastSummaryIndex ? this.messages[lastSummaryIndex]! : null;
   }
 
   private get systemMessages() {
@@ -174,10 +174,10 @@ export class SummaryChatHistory extends ChatHistory {
       // and convert summary message so it can be send to the LLM
       const summaryMessage: ChatMessage = transformSummary
         ? {
-            content: `Summary of the conversation so far: ${this.messages[lastSummaryIndex].content}`,
+            content: `Summary of the conversation so far: ${this.messages[lastSummaryIndex]!.content}`,
             role: "system",
           }
-        : this.messages[lastSummaryIndex];
+        : this.messages[lastSummaryIndex]!;
       return [summaryMessage, ...this.messages.slice(lastSummaryIndex + 1)];
     }
   }
diff --git a/packages/llamaindex/src/Retriever.ts b/packages/llamaindex/src/Retriever.ts
index 02e5d1b4aa02b3f33edda620e366d8286f0a5eca..b7ef4cf0462b23ec9674e865d2e1450780aa1a9d 100644
--- a/packages/llamaindex/src/Retriever.ts
+++ b/packages/llamaindex/src/Retriever.ts
@@ -13,6 +13,8 @@ export type RetrieveParams = {
 export interface BaseRetriever {
   retrieve(params: RetrieveParams): Promise<NodeWithScore[]>;
 
-  // to be deprecated soon
-  serviceContext?: ServiceContext;
+  /**
+   * @deprecated to be deprecated soon
+   */
+  serviceContext?: ServiceContext | undefined;
 }
diff --git a/packages/llamaindex/src/Settings.ts b/packages/llamaindex/src/Settings.ts
index 628271c409c065d8d4dab601ba71397f50465d46..dbd677ec5e960e953ca7296b949be40c9afddcdf 100644
--- a/packages/llamaindex/src/Settings.ts
+++ b/packages/llamaindex/src/Settings.ts
@@ -31,8 +31,8 @@ export interface Config {
   embedModel: BaseEmbedding | null;
   nodeParser: NodeParser | null;
   callbackManager: CallbackManager | null;
-  chunkSize?: number;
-  chunkOverlap?: number;
+  chunkSize: number | undefined;
+  chunkOverlap: number | undefined;
 }
 
 /**
@@ -156,7 +156,9 @@ class GlobalSettings implements Config {
   }
 
   set chunkOverlap(chunkOverlap: number | undefined) {
-    this.#chunkOverlap = chunkOverlap;
+    if (typeof chunkOverlap === "number") {
+      this.#chunkOverlap = chunkOverlap;
+    }
   }
 
   withChunkOverlap<Result>(chunkOverlap: number, fn: () => Result): Result {
diff --git a/packages/llamaindex/src/agent/base.ts b/packages/llamaindex/src/agent/base.ts
index 6971b022e4eec803b0a6c2b3b39c9ef4177b3907..b320fa9287da4cfefe836d4f9309e9b476fe142d 100644
--- a/packages/llamaindex/src/agent/base.ts
+++ b/packages/llamaindex/src/agent/base.ts
@@ -53,7 +53,7 @@ export function createTaskOutputStream<
         nextSteps: new Set(),
       };
       if (steps.length > 0) {
-        step.prevStep = steps[steps.length - 1];
+        step.prevStep = steps[steps.length - 1]!;
       }
       const taskOutputs: TaskStepOutput<
         Model,
@@ -77,7 +77,7 @@ export function createTaskOutputStream<
       context.logger.log("Finished step(id, %s).", step.id);
       // fixme: support multi-thread when there are multiple outputs
       // todo: for now we pretend there is only one task output
-      const { isLast, taskStep } = taskOutputs[0];
+      const { isLast, taskStep } = taskOutputs[0]!;
       context = {
         ...taskStep.context,
         store: {
diff --git a/packages/llamaindex/src/agent/react.ts b/packages/llamaindex/src/agent/react.ts
index 47bdc9c39fe2c2b60b90c332b5f3666070fa04fb..cec254f89117e2e9b11d84168fdfe1ab46bfe6d3 100644
--- a/packages/llamaindex/src/agent/react.ts
+++ b/packages/llamaindex/src/agent/react.ts
@@ -89,8 +89,8 @@ function extractFinalResponse(
     );
   }
 
-  const thought = match[1].trim();
-  const answer = match[2].trim();
+  const thought = match[1]!.trim();
+  const answer = match[2]!.trim();
   return [thought, answer];
 }
 
@@ -108,9 +108,9 @@ function extractToolUse(
     );
   }
 
-  const thought = match[1].trim();
-  const action = match[2].trim();
-  const actionInput = match[3].trim();
+  const thought = match[1]!.trim();
+  const action = match[2]!.trim();
+  const actionInput = match[3]!.trim();
   return [thought, action, actionInput];
 }
 
diff --git a/packages/llamaindex/src/cloud/LLamaCloudFileService.ts b/packages/llamaindex/src/cloud/LLamaCloudFileService.ts
index e4471273740c856a01e4b2b5207a653df7829119..48da789cede9adab83e51d8bef38add80a97aa71 100644
--- a/packages/llamaindex/src/cloud/LLamaCloudFileService.ts
+++ b/packages/llamaindex/src/cloud/LLamaCloudFileService.ts
@@ -12,9 +12,14 @@ export class LLamaCloudFileService {
   public static async getAllProjectsWithPipelines() {
     initService();
     try {
-      const projects = await ProjectsService.listProjectsApiV1ProjectsGet();
-      const pipelines =
-        await PipelinesService.searchPipelinesApiV1PipelinesGet();
+      const { data: projects } =
+        await ProjectsService.listProjectsApiV1ProjectsGet({
+          throwOnError: true,
+        });
+      const { data: pipelines } =
+        await PipelinesService.searchPipelinesApiV1PipelinesGet({
+          throwOnError: true,
+        });
       return projects.map((project) => ({
         ...project,
         pipelines: pipelines.filter((p) => p.project_id === project.id),
@@ -35,11 +40,12 @@ export class LLamaCloudFileService {
     customMetadata: Record<string, any> = {},
   ) {
     initService();
-    const file = await FilesService.uploadFileApiV1FilesPost({
-      projectId,
-      formData: {
+    const { data: file } = await FilesService.uploadFileApiV1FilesPost({
+      query: { project_id: projectId },
+      body: {
         upload_file: uploadFile,
       },
+      throwOnError: true,
     });
     const files = [
       {
@@ -48,19 +54,24 @@ export class LLamaCloudFileService {
       },
     ];
     await PipelinesService.addFilesToPipelineApiV1PipelinesPipelineIdFilesPut({
-      pipelineId,
-      requestBody: files,
+      path: {
+        pipeline_id: pipelineId,
+      },
+      body: files,
     });
 
     // Wait 2s for the file to be processed
     const maxAttempts = 20;
     let attempt = 0;
     while (attempt < maxAttempts) {
-      const result =
+      const { data: result } =
         await PipelinesService.getPipelineFileStatusApiV1PipelinesPipelineIdFilesFileIdStatusGet(
           {
-            pipelineId,
-            fileId: file.id,
+            path: {
+              pipeline_id: pipelineId,
+              file_id: file.id,
+            },
+            throwOnError: true,
           },
         );
       if (result.status === "ERROR") {
@@ -83,16 +94,24 @@ export class LLamaCloudFileService {
    */
   public static async getFileUrl(pipelineId: string, filename: string) {
     initService();
-    const allPipelineFiles =
+    const { data: allPipelineFiles } =
       await PipelinesService.listPipelineFilesApiV1PipelinesPipelineIdFilesGet({
-        pipelineId,
+        path: {
+          pipeline_id: pipelineId,
+        },
+        throwOnError: true,
       });
     const file = allPipelineFiles.find((file) => file.name === filename);
     if (!file?.file_id) return null;
-    const fileContent =
+    const { data: fileContent } =
       await FilesService.readFileContentApiV1FilesIdContentGet({
-        id: file.file_id,
-        projectId: file.project_id,
+        path: {
+          id: file.file_id,
+        },
+        query: {
+          project_id: file.project_id,
+        },
+        throwOnError: true,
       });
     return fileContent.url;
   }
diff --git a/packages/llamaindex/src/cloud/LlamaCloudIndex.ts b/packages/llamaindex/src/cloud/LlamaCloudIndex.ts
index d29b52e31ba60a339a907f6cd0666775b4c76f42..7e83ab1360501cf170eea56aa2143cfd7cb6ecd3 100644
--- a/packages/llamaindex/src/cloud/LlamaCloudIndex.ts
+++ b/packages/llamaindex/src/cloud/LlamaCloudIndex.ts
@@ -38,10 +38,13 @@ export class LlamaCloudIndex {
     }
 
     while (true) {
-      const pipelineStatus =
+      const { data: pipelineStatus } =
         await PipelinesService.getPipelineStatusApiV1PipelinesPipelineIdStatusGet(
           {
-            pipelineId,
+            path: {
+              pipeline_id: pipelineId,
+            },
+            throwOnError: true,
           },
         );
 
@@ -90,9 +93,14 @@ export class LlamaCloudIndex {
       const docsToRemove = new Set<string>();
 
       for (const doc of pendingDocs) {
-        const { status } =
+        const {
+          data: { status },
+        } =
           await PipelinesService.getPipelineDocumentStatusApiV1PipelinesPipelineIdDocumentsDocumentIdStatusGet(
-            { pipelineId, documentId: doc },
+            {
+              path: { pipeline_id: pipelineId, document_id: doc },
+              throwOnError: true,
+            },
           );
 
         if (status === "NOT_STARTED" || status === "IN_PROGRESS") {
@@ -136,12 +144,16 @@ export class LlamaCloudIndex {
     name?: string,
     projectName?: string,
   ): Promise<string> {
-    const pipelines = await PipelinesService.searchPipelinesApiV1PipelinesGet({
-      projectId: await this.getProjectId(projectName),
-      pipelineName: name ?? this.params.name,
-    });
-
-    return pipelines[0].id;
+    const { data: pipelines } =
+      await PipelinesService.searchPipelinesApiV1PipelinesGet({
+        query: {
+          project_id: await this.getProjectId(projectName),
+          pipeline_name: name ?? this.params.name,
+        },
+        throwOnError: true,
+      });
+
+    return pipelines[0]!.id;
   }
 
   public async getProjectId(
@@ -177,26 +189,37 @@ export class LlamaCloudIndex {
       transformations: params.transformations ?? defaultTransformations,
     });
 
-    const project = await ProjectsService.upsertProjectApiV1ProjectsPut({
-      organizationId: params.organizationId,
-      requestBody: {
-        name: params.projectName ?? "default",
-      },
-    });
+    const { data: project } =
+      await ProjectsService.upsertProjectApiV1ProjectsPut({
+        query: {
+          organization_id: params.organizationId,
+        },
+        body: {
+          name: params.projectName ?? "default",
+        },
+        throwOnError: true,
+      });
 
     if (!project.id) {
       throw new Error("Project ID should be defined");
     }
 
-    const pipeline = await PipelinesService.upsertPipelineApiV1PipelinesPut({
-      projectId: project.id,
-      requestBody: {
-        name: params.name,
-        configured_transformations:
-          pipelineCreateParams.configured_transformations,
-        pipeline_type: pipelineCreateParams.pipeline_type,
-      },
-    });
+    const { data: pipeline } =
+      await PipelinesService.upsertPipelineApiV1PipelinesPut({
+        query: {
+          project_id: project.id,
+        },
+        body: pipelineCreateParams.configured_transformations
+          ? {
+              name: params.name,
+              configured_transformations:
+                pipelineCreateParams.configured_transformations,
+            }
+          : {
+              name: params.name,
+            },
+        throwOnError: true,
+      });
 
     if (!pipeline.id) {
       throw new Error("Pipeline ID must be defined");
@@ -208,8 +231,10 @@ export class LlamaCloudIndex {
 
     await PipelinesService.upsertBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPut(
       {
-        pipelineId: pipeline.id,
-        requestBody: params.documents.map((doc) => ({
+        path: {
+          pipeline_id: pipeline.id,
+        },
+        body: params.documents.map((doc) => ({
           metadata: doc.metadata,
           text: doc.text,
           excluded_embed_metadata_keys: doc.excludedEmbedMetadataKeys,
@@ -220,10 +245,11 @@ export class LlamaCloudIndex {
     );
 
     while (true) {
-      const pipelineStatus =
+      const { data: pipelineStatus } =
         await PipelinesService.getPipelineStatusApiV1PipelinesPipelineIdStatusGet(
           {
-            pipelineId: pipeline.id,
+            path: { pipeline_id: pipeline.id },
+            throwOnError: true,
           },
         );
 
@@ -299,8 +325,10 @@ export class LlamaCloudIndex {
 
     await PipelinesService.createBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPost(
       {
-        pipelineId: pipelineId,
-        requestBody: [
+        path: {
+          pipeline_id: pipelineId,
+        },
+        body: [
           {
             metadata: document.metadata,
             text: document.text,
@@ -327,8 +355,10 @@ export class LlamaCloudIndex {
 
     await PipelinesService.deletePipelineDocumentApiV1PipelinesPipelineIdDocumentsDocumentIdDelete(
       {
-        pipelineId,
-        documentId: document.id_,
+        path: {
+          pipeline_id: pipelineId,
+          document_id: document.id_,
+        },
       },
     );
 
@@ -347,8 +377,10 @@ export class LlamaCloudIndex {
 
     await PipelinesService.upsertBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPut(
       {
-        pipelineId,
-        requestBody: [
+        path: {
+          pipeline_id: pipelineId,
+        },
+        body: [
           {
             metadata: document.metadata,
             text: document.text,
diff --git a/packages/llamaindex/src/cloud/LlamaCloudRetriever.ts b/packages/llamaindex/src/cloud/LlamaCloudRetriever.ts
index a947dfb6022eec99d0379e85109db8963f90b60f..3d41ae76c8758557601b3cfe5c8e52e5eeec592b 100644
--- a/packages/llamaindex/src/cloud/LlamaCloudRetriever.ts
+++ b/packages/llamaindex/src/cloud/LlamaCloudRetriever.ts
@@ -59,20 +59,27 @@ export class LlamaCloudRetriever implements BaseRetriever {
     query,
     preFilters,
   }: RetrieveParams): Promise<NodeWithScore[]> {
-    const pipelines = await PipelinesService.searchPipelinesApiV1PipelinesGet({
-      projectId: await getProjectId(this.projectName, this.organizationId),
-      pipelineName: this.pipelineName,
-    });
+    const { data: pipelines } =
+      await PipelinesService.searchPipelinesApiV1PipelinesGet({
+        query: {
+          project_id: await getProjectId(this.projectName, this.organizationId),
+          pipeline_name: this.pipelineName,
+        },
+        throwOnError: true,
+      });
 
-    if (pipelines.length === 0 || !pipelines[0].id) {
+    if (pipelines.length === 0 || !pipelines[0]!.id) {
       throw new Error(
         `No pipeline found with name ${this.pipelineName} in project ${this.projectName}`,
       );
     }
 
-    const pipeline =
+    const { data: pipeline } =
       await PipelinesService.getPipelineApiV1PipelinesPipelineIdGet({
-        pipelineId: pipelines[0].id,
+        path: {
+          pipeline_id: pipelines[0]!.id,
+        },
+        throwOnError: true,
       });
 
     if (!pipeline) {
@@ -81,15 +88,18 @@ export class LlamaCloudRetriever implements BaseRetriever {
       );
     }
 
-    const results =
+    const { data: results } =
       await PipelinesService.runSearchApiV1PipelinesPipelineIdRetrievePost({
-        pipelineId: pipeline.id,
-        requestBody: {
+        throwOnError: true,
+        path: {
+          pipeline_id: pipeline.id,
+        },
+        body: {
           ...this.retrieveParams,
           query: extractText(query),
           search_filters:
             this.retrieveParams.filters ?? (preFilters as MetadataFilters),
-          dense_similarity_top_k: this.retrieveParams.similarityTopK,
+          dense_similarity_top_k: this.retrieveParams.similarityTopK!,
         },
       });
 
diff --git a/packages/llamaindex/src/cloud/type.ts b/packages/llamaindex/src/cloud/type.ts
index cd77474c3fdc8fbd9cdbab6e6a31a6c541507aa5..cee7275cca1cee2de26d4f23be11efbbbe1d6445 100644
--- a/packages/llamaindex/src/cloud/type.ts
+++ b/packages/llamaindex/src/cloud/type.ts
@@ -1,10 +1,13 @@
 import type { ServiceContext } from "../ServiceContext.js";
 
-export type ClientParams = { apiKey?: string; baseUrl?: string };
+export type ClientParams = {
+  apiKey?: string | undefined;
+  baseUrl?: string | undefined;
+};
 
 export type CloudConstructorParams = {
   name: string;
   projectName: string;
-  organizationId?: string;
-  serviceContext?: ServiceContext;
+  organizationId?: string | undefined;
+  serviceContext?: ServiceContext | undefined;
 } & ClientParams;
diff --git a/packages/llamaindex/src/cloud/utils.ts b/packages/llamaindex/src/cloud/utils.ts
index 033a37c7fe89590388febc917b2a65648f316bbe..c5fdcbb3ed89047d52345764c85758e5f5de6d52 100644
--- a/packages/llamaindex/src/cloud/utils.ts
+++ b/packages/llamaindex/src/cloud/utils.ts
@@ -1,4 +1,4 @@
-import { OpenAPI, ProjectsService } from "@llamaindex/cloud/api";
+import { client, ProjectsService } from "@llamaindex/cloud/api";
 import { DEFAULT_BASE_URL } from "@llamaindex/core/global";
 import { getEnv } from "@llamaindex/env";
 import type { ClientParams } from "./type.js";
@@ -8,13 +8,26 @@ function getBaseUrl(baseUrl?: string): string {
 }
 
 export function getAppBaseUrl(): string {
-  return OpenAPI.BASE.replace(/api\./, "");
+  return client.getConfig().baseUrl?.replace(/api\./, "") ?? "";
 }
 
+// fixme: refactor this to init at the top level or module level
+let initOnce = false;
 export function initService({ apiKey, baseUrl }: ClientParams = {}) {
-  OpenAPI.TOKEN = apiKey ?? getEnv("LLAMA_CLOUD_API_KEY");
-  OpenAPI.BASE = getBaseUrl(baseUrl);
-  if (!OpenAPI.TOKEN) {
+  if (initOnce) {
+    return;
+  }
+  initOnce = true;
+  client.setConfig({
+    baseUrl: getBaseUrl(baseUrl),
+    throwOnError: true,
+  });
+  const token = apiKey ?? getEnv("LLAMA_CLOUD_API_KEY");
+  client.interceptors.request.use((request) => {
+    request.headers.set("Authorization", `Bearer ${token}`);
+    return request;
+  });
+  if (!token) {
     throw new Error(
       "API Key is required for LlamaCloudIndex. Please pass the apiKey parameter",
     );
@@ -25,10 +38,15 @@ export async function getProjectId(
   projectName: string,
   organizationId?: string,
 ): Promise<string> {
-  const projects = await ProjectsService.listProjectsApiV1ProjectsGet({
-    projectName: projectName,
-    organizationId: organizationId,
-  });
+  const { data: projects } = await ProjectsService.listProjectsApiV1ProjectsGet(
+    {
+      query: {
+        project_name: projectName,
+        organization_id: organizationId,
+      },
+      throwOnError: true,
+    },
+  );
 
   if (projects.length === 0) {
     throw new Error(
@@ -40,7 +58,7 @@ export async function getProjectId(
     );
   }
 
-  const project = projects[0];
+  const project = projects[0]!;
 
   if (!project.id) {
     throw new Error(`No project found with name ${projectName}`);
diff --git a/packages/llamaindex/src/embeddings/DeepInfraEmbedding.ts b/packages/llamaindex/src/embeddings/DeepInfraEmbedding.ts
index cc38a422a7899995c0da7d67fc729e7204378082..bd0187cba38b359040198eccdf66e21d963b8f16 100644
--- a/packages/llamaindex/src/embeddings/DeepInfraEmbedding.ts
+++ b/packages/llamaindex/src/embeddings/DeepInfraEmbedding.ts
@@ -87,7 +87,7 @@ export class DeepInfraEmbedding extends BaseEmbedding {
   async getTextEmbedding(text: string): Promise<number[]> {
     const texts = mapPrefixWithInputs(this.textPrefix, [text]);
     const embeddings = await this.getDeepInfraEmbedding(texts);
-    return embeddings[0];
+    return embeddings[0]!;
   }
 
   async getQueryEmbedding(
@@ -97,7 +97,7 @@ export class DeepInfraEmbedding extends BaseEmbedding {
     if (text) {
       const queries = mapPrefixWithInputs(this.queryPrefix, [text]);
       const embeddings = await this.getDeepInfraEmbedding(queries);
-      return embeddings[0];
+      return embeddings[0]!;
     } else {
       return null;
     }
diff --git a/packages/llamaindex/src/embeddings/JinaAIEmbedding.ts b/packages/llamaindex/src/embeddings/JinaAIEmbedding.ts
index c154477870afb5f5730b565028d993bc46c10b12..6cf789c6dbfaa6227db8ab567a641f94ed79baea 100644
--- a/packages/llamaindex/src/embeddings/JinaAIEmbedding.ts
+++ b/packages/llamaindex/src/embeddings/JinaAIEmbedding.ts
@@ -37,13 +37,13 @@ export class JinaAIEmbedding extends MultiModalEmbedding {
 
   async getTextEmbedding(text: string): Promise<number[]> {
     const result = await this.getJinaEmbedding({ input: [{ text }] });
-    return result.data[0].embedding;
+    return result.data[0]!.embedding;
   }
 
   async getImageEmbedding(image: ImageType): Promise<number[]> {
     const img = await this.getImageInput(image);
     const result = await this.getJinaEmbedding({ input: [img] });
-    return result.data[0].embedding;
+    return result.data[0]!.embedding;
   }
 
   // Retrieve multiple text embeddings in a single request
@@ -81,7 +81,7 @@ export class JinaAIEmbedding extends MultiModalEmbedding {
   ): Promise<{ bytes: string } | { url: string }> {
     if (isLocal(image) || image instanceof Blob) {
       const base64 = await imageToDataUrl(image);
-      const bytes = base64.split(",")[1];
+      const bytes = base64.split(",")[1]!;
       return { bytes };
     } else {
       return { url: image.toString() };
diff --git a/packages/llamaindex/src/embeddings/MixedbreadAIEmbeddings.ts b/packages/llamaindex/src/embeddings/MixedbreadAIEmbeddings.ts
index 45d5e1955f9746dd47390ab9f68b297044f6e3ad..fcf30db837835e892d6bd94d1829dbb3cfc0123f 100644
--- a/packages/llamaindex/src/embeddings/MixedbreadAIEmbeddings.ts
+++ b/packages/llamaindex/src/embeddings/MixedbreadAIEmbeddings.ts
@@ -105,8 +105,10 @@ export class MixedbreadAIEmbeddings extends BaseEmbedding {
     }
 
     this.embedBatchSize = params?.embedBatchSize ?? 128;
-    this.embedInfo = params?.embedInfo;
-    this.requestParams = {
+    if (params?.embedInfo) {
+      this.embedInfo = params?.embedInfo;
+    }
+    this.requestParams = <EmbeddingsRequestWithoutInput>{
       model: params?.model ?? "mixedbread-ai/mxbai-embed-large-v1",
       normalized: params?.normalized,
       dimensions: params?.dimensions,
@@ -123,10 +125,16 @@ export class MixedbreadAIEmbeddings extends BaseEmbedding {
         "user-agent": "@mixedbread-ai/llamaindex-ts-sdk",
       },
     };
-    this.client = new MixedbreadAIClient({
-      apiKey,
-      environment: params?.baseUrl,
-    });
+    this.client = new MixedbreadAIClient(
+      params?.baseUrl
+        ? {
+            apiKey,
+            environment: params?.baseUrl,
+          }
+        : {
+            apiKey,
+          },
+    );
   }
 
   /**
@@ -140,7 +148,7 @@ export class MixedbreadAIEmbeddings extends BaseEmbedding {
    * console.log(result);
    */
   async getTextEmbedding(text: string): Promise<number[]> {
-    return (await this.getTextEmbeddings([text]))[0];
+    return (await this.getTextEmbeddings([text]))[0]!;
   }
 
   /**
diff --git a/packages/llamaindex/src/embeddings/MultiModalEmbedding.ts b/packages/llamaindex/src/embeddings/MultiModalEmbedding.ts
index 1792c7dbd88d2d8a27c81d986a9fce37fb9e2476..eb275ceab2f6f1f5b4c2880ac9f73daceda43b1b 100644
--- a/packages/llamaindex/src/embeddings/MultiModalEmbedding.ts
+++ b/packages/llamaindex/src/embeddings/MultiModalEmbedding.ts
@@ -39,7 +39,7 @@ export abstract class MultiModalEmbedding extends BaseEmbedding {
       _options,
     );
     for (let i = 0; i < textNodes.length; i++) {
-      textNodes[i].embedding = embeddings[i];
+      textNodes[i]!.embedding = embeddings[i];
     }
 
     const imageEmbeddings = await batchEmbeddings(
@@ -49,7 +49,7 @@ export abstract class MultiModalEmbedding extends BaseEmbedding {
       _options,
     );
     for (let i = 0; i < imageNodes.length; i++) {
-      imageNodes[i].embedding = imageEmbeddings[i];
+      imageNodes[i]!.embedding = imageEmbeddings[i];
     }
 
     return nodes;
diff --git a/packages/llamaindex/src/embeddings/OpenAIEmbedding.ts b/packages/llamaindex/src/embeddings/OpenAIEmbedding.ts
index 2fb3c3b3055b51762248afca42ad8d536df528d9..72386410e1dfd3a97568bf31c6f709ffcaaa0e55 100644
--- a/packages/llamaindex/src/embeddings/OpenAIEmbedding.ts
+++ b/packages/llamaindex/src/embeddings/OpenAIEmbedding.ts
@@ -36,21 +36,20 @@ export class OpenAIEmbedding extends BaseEmbedding {
   /** embeddding model. defaults to "text-embedding-ada-002" */
   model: string;
   /** number of dimensions of the resulting vector, for models that support choosing fewer dimensions. undefined will default to model default */
-  dimensions: number | undefined;
+  dimensions?: number | undefined;
 
   // OpenAI session params
 
   /** api key */
-  apiKey?: string = undefined;
+  apiKey?: string | undefined = undefined;
   /** maximum number of retries, default 10 */
   maxRetries: number;
   /** timeout in ms, default 60 seconds  */
-  timeout?: number;
+  timeout?: number | undefined;
   /** other session options for OpenAI */
-  additionalSessionOptions?: Omit<
-    Partial<OpenAIClientOptions>,
-    "apiKey" | "maxRetries" | "timeout"
-  >;
+  additionalSessionOptions?:
+    | Omit<Partial<OpenAIClientOptions>, "apiKey" | "maxRetries" | "timeout">
+    | undefined;
 
   /** session object */
   session: OpenAISession;
@@ -119,11 +118,18 @@ export class OpenAIEmbedding extends BaseEmbedding {
     // TODO: ensure this for every sub class by calling it in the base class
     input = this.truncateMaxTokens(input);
 
-    const { data } = await this.session.openai.embeddings.create({
-      model: this.model,
-      dimensions: this.dimensions, // only sent to OpenAI if set by user
-      input,
-    });
+    const { data } = await this.session.openai.embeddings.create(
+      this.dimensions
+        ? {
+            model: this.model,
+            dimensions: this.dimensions, // only sent to OpenAI if set by user
+            input,
+          }
+        : {
+            model: this.model,
+            input,
+          },
+    );
 
     return data.map((d) => d.embedding);
   }
@@ -141,6 +147,6 @@ export class OpenAIEmbedding extends BaseEmbedding {
    * @param texts
    */
   async getTextEmbedding(text: string): Promise<number[]> {
-    return (await this.getOpenAIEmbedding([text]))[0];
+    return (await this.getOpenAIEmbedding([text]))[0]!;
   }
 }
diff --git a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts b/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
index c7021cd77e474622344c136f1fe4d97e3bf64f16..41c603f1eb3f7f7b36b3a801c58cf7d770a16c81 100644
--- a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
+++ b/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
@@ -38,16 +38,16 @@ export class ContextChatEngine extends PromptMixin implements ChatEngine {
   chatModel: LLM;
   chatHistory: ChatHistory;
   contextGenerator: ContextGenerator & PromptMixin;
-  systemPrompt?: string;
+  systemPrompt?: string | undefined;
 
   constructor(init: {
     retriever: BaseRetriever;
-    chatModel?: LLM;
-    chatHistory?: ChatMessage[];
-    contextSystemPrompt?: ContextSystemPrompt;
-    nodePostprocessors?: BaseNodePostprocessor[];
-    systemPrompt?: string;
-    contextRole?: MessageType;
+    chatModel?: LLM | undefined;
+    chatHistory?: ChatMessage[] | undefined;
+    contextSystemPrompt?: ContextSystemPrompt | undefined;
+    nodePostprocessors?: BaseNodePostprocessor[] | undefined;
+    systemPrompt?: string | undefined;
+    contextRole?: MessageType | undefined;
   }) {
     super();
     this.chatModel = init.chatModel ?? Settings.llm;
diff --git a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts b/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
index 533ed3b539e710c46e0294b8475ed4781df670b4..378f2b05ff48d2ad130ec3aa2d495557efe5ef56 100644
--- a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
+++ b/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
@@ -23,10 +23,10 @@ export class DefaultContextGenerator
 
   constructor(init: {
     retriever: BaseRetriever;
-    contextSystemPrompt?: ContextSystemPrompt;
-    nodePostprocessors?: BaseNodePostprocessor[];
-    contextRole?: MessageType;
-    metadataMode?: MetadataMode;
+    contextSystemPrompt?: ContextSystemPrompt | undefined;
+    nodePostprocessors?: BaseNodePostprocessor[] | undefined;
+    contextRole?: MessageType | undefined;
+    metadataMode?: MetadataMode | undefined;
   }) {
     super();
 
diff --git a/packages/llamaindex/src/engines/query/RouterQueryEngine.ts b/packages/llamaindex/src/engines/query/RouterQueryEngine.ts
index 4f3c9b154080ed71bc083ef0d740518cc5d23a6d..e564799ec09b8fdb4a8b3792c32babca8dd2a6c4 100644
--- a/packages/llamaindex/src/engines/query/RouterQueryEngine.ts
+++ b/packages/llamaindex/src/engines/query/RouterQueryEngine.ts
@@ -64,9 +64,9 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine {
   constructor(init: {
     selector: BaseSelector;
     queryEngineTools: RouterQueryEngineTool[];
-    serviceContext?: ServiceContext;
-    summarizer?: TreeSummarize;
-    verbose?: boolean;
+    serviceContext?: ServiceContext | undefined;
+    summarizer?: TreeSummarize | undefined;
+    verbose?: boolean | undefined;
   }) {
     super();
 
@@ -138,14 +138,14 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine {
     if (result.selections.length > 1) {
       const responses: EngineResponse[] = [];
       for (let i = 0; i < result.selections.length; i++) {
-        const engineInd = result.selections[i];
-        const logStr = `Selecting query engine ${engineInd.index}: ${result.selections[i].index}.`;
+        const engineInd = result.selections[i]!;
+        const logStr = `Selecting query engine ${engineInd.index}: ${result.selections[i]!.index}.`;
 
         if (this.verbose) {
           console.log(logStr + "\n");
         }
 
-        const selectedQueryEngine = this.queryEngines[engineInd.index];
+        const selectedQueryEngine = this.queryEngines[engineInd.index]!;
         responses.push(
           await selectedQueryEngine.query({
             query: extractText(query),
@@ -163,15 +163,15 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine {
 
         return finalResponse;
       } else {
-        return responses[0];
+        return responses[0]!;
       }
     } else {
       let selectedQueryEngine;
 
       try {
-        selectedQueryEngine = this.queryEngines[result.selections[0].index];
+        selectedQueryEngine = this.queryEngines[result.selections[0]!.index];
 
-        const logStr = `Selecting query engine ${result.selections[0].index}: ${result.selections[0].reason}`;
+        const logStr = `Selecting query engine ${result.selections[0]!.index}: ${result.selections[0]!.reason}`;
 
         if (this.verbose) {
           console.log(logStr + "\n");
diff --git a/packages/llamaindex/src/evaluation/Faithfulness.ts b/packages/llamaindex/src/evaluation/Faithfulness.ts
index dce2b50f3e9b36e8a9c3f8e35df92a301b9410dc..b1e68f1f4ce6109476d955d4ded33233daff7f96 100644
--- a/packages/llamaindex/src/evaluation/Faithfulness.ts
+++ b/packages/llamaindex/src/evaluation/Faithfulness.ts
@@ -22,16 +22,16 @@ export class FaithfulnessEvaluator
   extends PromptMixin
   implements BaseEvaluator
 {
-  private serviceContext?: ServiceContext;
+  private serviceContext?: ServiceContext | undefined;
   private raiseError: boolean;
   private evalTemplate: FaithfulnessTextQAPrompt;
   private refineTemplate: FaithfulnessRefinePrompt;
 
   constructor(params?: {
-    serviceContext?: ServiceContext;
-    raiseError?: boolean;
-    faithfulnessSystemPrompt?: FaithfulnessTextQAPrompt;
-    faithFulnessRefinePrompt?: FaithfulnessRefinePrompt;
+    serviceContext?: ServiceContext | undefined;
+    raiseError?: boolean | undefined;
+    faithfulnessSystemPrompt?: FaithfulnessTextQAPrompt | undefined;
+    faithFulnessRefinePrompt?: FaithfulnessRefinePrompt | undefined;
   }) {
     super();
     this.serviceContext = params?.serviceContext;
diff --git a/packages/llamaindex/src/evaluation/Relevancy.ts b/packages/llamaindex/src/evaluation/Relevancy.ts
index d06c45897b6c67610f8e6de577bf8d149b1c32c9..da22741b8bd4ecaee4bddacb4097c4066c03c8ba 100644
--- a/packages/llamaindex/src/evaluation/Relevancy.ts
+++ b/packages/llamaindex/src/evaluation/Relevancy.ts
@@ -16,14 +16,14 @@ import type {
 } from "./types.js";
 
 type RelevancyParams = {
-  serviceContext?: ServiceContext;
-  raiseError?: boolean;
-  evalTemplate?: RelevancyEvalPrompt;
-  refineTemplate?: RelevancyRefinePrompt;
+  serviceContext?: ServiceContext | undefined;
+  raiseError?: boolean | undefined;
+  evalTemplate?: RelevancyEvalPrompt | undefined;
+  refineTemplate?: RelevancyRefinePrompt | undefined;
 };
 
 export class RelevancyEvaluator extends PromptMixin implements BaseEvaluator {
-  private serviceContext?: ServiceContext;
+  private serviceContext?: ServiceContext | undefined;
   private raiseError: boolean;
 
   private evalTemplate: RelevancyEvalPrompt;
diff --git a/packages/llamaindex/src/evaluation/utils.ts b/packages/llamaindex/src/evaluation/utils.ts
index c968a82424e1f4f0aec791bdb7eeaefa0b6cc828..da57f2cc3099c7988f24b1b87c37aefb95c2742f 100644
--- a/packages/llamaindex/src/evaluation/utils.ts
+++ b/packages/llamaindex/src/evaluation/utils.ts
@@ -1,7 +1,7 @@
 export const defaultEvaluationParser = (
   evalResponse: string,
 ): [number, string] => {
-  const [scoreStr, reasoningStr] = evalResponse.split("\n");
+  const [scoreStr, reasoningStr] = evalResponse.split("\n") as [string, string];
   const score = parseFloat(scoreStr);
   const reasoning = reasoningStr.trim();
   return [score, reasoning];
diff --git a/packages/llamaindex/src/extractors/MetadataExtractors.ts b/packages/llamaindex/src/extractors/MetadataExtractors.ts
index 5ab760209c9a173aa57ebd57900aebed76b06b77..b7ca6c49f9c8fef157ef172b2c22ddaa6134d828 100644
--- a/packages/llamaindex/src/extractors/MetadataExtractors.ts
+++ b/packages/llamaindex/src/extractors/MetadataExtractors.ts
@@ -173,7 +173,7 @@ export class TitleExtractor extends BaseExtractor {
 
     return nodesToExtractTitle.map((node) => {
       return {
-        documentTitle: titlesByDocument[node.sourceNode?.nodeId ?? ""],
+        documentTitle: titlesByDocument[node.sourceNode?.nodeId ?? ""]!,
       };
     });
   }
diff --git a/packages/llamaindex/src/extractors/types.ts b/packages/llamaindex/src/extractors/types.ts
index 0bf97b3505a4bb95b46331848986fb5b885b81b3..ae8dbce602e7aa889951435c1111a0bdc9d9674a 100644
--- a/packages/llamaindex/src/extractors/types.ts
+++ b/packages/llamaindex/src/extractors/types.ts
@@ -52,20 +52,20 @@ export abstract class BaseExtractor extends TransformComponent {
     const curMetadataList = await this.extract(newNodes);
 
     for (const idx in newNodes) {
-      newNodes[idx].metadata = {
-        ...newNodes[idx].metadata,
+      newNodes[idx]!.metadata = {
+        ...newNodes[idx]!.metadata,
         ...curMetadataList[idx],
       };
     }
 
     for (const idx in newNodes) {
       if (excludedEmbedMetadataKeys) {
-        newNodes[idx].excludedEmbedMetadataKeys.concat(
+        newNodes[idx]!.excludedEmbedMetadataKeys.concat(
           excludedEmbedMetadataKeys,
         );
       }
       if (excludedLlmMetadataKeys) {
-        newNodes[idx].excludedLlmMetadataKeys.concat(excludedLlmMetadataKeys);
+        newNodes[idx]!.excludedLlmMetadataKeys.concat(excludedLlmMetadataKeys);
       }
       if (!this.disableTemplateRewrite) {
         if (newNodes[idx] instanceof TextNode) {
diff --git a/packages/llamaindex/src/indices/BaseIndex.ts b/packages/llamaindex/src/indices/BaseIndex.ts
index e5b7468f7f6451943d68f1c30a8348b06e9a1dbb..97ed433361387a00bd39cb28a3107fed11f73bcc 100644
--- a/packages/llamaindex/src/indices/BaseIndex.ts
+++ b/packages/llamaindex/src/indices/BaseIndex.ts
@@ -43,10 +43,10 @@ export class KeywordTable extends IndexStruct {
 }
 
 export interface BaseIndexInit<T> {
-  serviceContext?: ServiceContext;
+  serviceContext?: ServiceContext | undefined;
   storageContext: StorageContext;
   docStore: BaseDocumentStore;
-  indexStore?: BaseIndexStore;
+  indexStore?: BaseIndexStore | undefined;
   indexStruct: T;
 }
 
@@ -55,10 +55,10 @@ export interface BaseIndexInit<T> {
  * they can be retrieved for our queries.
  */
 export abstract class BaseIndex<T> {
-  serviceContext?: ServiceContext;
+  serviceContext?: ServiceContext | undefined;
   storageContext: StorageContext;
   docStore: BaseDocumentStore;
-  indexStore?: BaseIndexStore;
+  indexStore?: BaseIndexStore | undefined;
   indexStruct: T;
 
   constructor(init: BaseIndexInit<T>) {
diff --git a/packages/llamaindex/src/indices/IndexStruct.ts b/packages/llamaindex/src/indices/IndexStruct.ts
index 462fa92150c237862fb440b214b8a029e3a4770d..fd15a09e5bb6f0ee5c3f6f5844c8aa6997fdc23f 100644
--- a/packages/llamaindex/src/indices/IndexStruct.ts
+++ b/packages/llamaindex/src/indices/IndexStruct.ts
@@ -5,9 +5,9 @@ import { randomUUID } from "@llamaindex/env";
  */
 export abstract class IndexStruct {
   indexId: string;
-  summary?: string;
+  summary?: string | undefined;
 
-  constructor(indexId = randomUUID(), summary = undefined) {
+  constructor(indexId = randomUUID(), summary: string | undefined = undefined) {
     this.indexId = indexId;
     this.summary = summary;
   }
diff --git a/packages/llamaindex/src/indices/keyword/index.ts b/packages/llamaindex/src/indices/keyword/index.ts
index 4f53b34ffdf4df5cdf3126745cb2b6bc771adbd5..4a6e996419fa0cae6af26e0776db44d48d5b22dd 100644
--- a/packages/llamaindex/src/indices/keyword/index.ts
+++ b/packages/llamaindex/src/indices/keyword/index.ts
@@ -101,7 +101,7 @@ abstract class BaseKeywordTableRetriever implements BaseRetriever {
     }
 
     const sortedChunkIndices = Object.keys(chunkIndicesCount)
-      .sort((a, b) => chunkIndicesCount[b] - chunkIndicesCount[a])
+      .sort((a, b) => chunkIndicesCount[b]! - chunkIndicesCount[a]!)
       .slice(0, this.numChunksPerQuery);
 
     const sortedNodes = await this.docstore.getNodes(sortedChunkIndices);
@@ -175,7 +175,7 @@ export class KeywordTableIndex extends BaseIndex<KeywordTable> {
     if (options.indexStruct) {
       indexStruct = options.indexStruct;
     } else if (indexStructs.length == 1) {
-      indexStruct = indexStructs[0];
+      indexStruct = indexStructs[0]!;
     } else if (indexStructs.length > 1 && options.indexId) {
       indexStruct = (await indexStore.getIndexStruct(
         options.indexId,
diff --git a/packages/llamaindex/src/indices/keyword/utils.ts b/packages/llamaindex/src/indices/keyword/utils.ts
index e3594ad2f440da31e160c66f759394e379b1db4b..706c51663cbaf1ced2b8135b5de13a5e8f5f2842 100644
--- a/packages/llamaindex/src/indices/keyword/utils.ts
+++ b/packages/llamaindex/src/indices/keyword/utils.ts
@@ -58,7 +58,7 @@ export function simpleExtractKeywords(
 
   // Sorting tokens by frequency
   const sortedTokens: string[] = Object.keys(valueCounts).sort(
-    (a, b) => valueCounts[b] - valueCounts[a],
+    (a, b) => valueCounts[b]! - valueCounts[a]!,
   );
 
   const keywords: string[] = maxKeywords
diff --git a/packages/llamaindex/src/indices/summary/index.ts b/packages/llamaindex/src/indices/summary/index.ts
index 512576c7b4b362caf2758bfd4ace771f2cf06e62..43ef6181f433e657325dbf4ecc93fbfe35d485ca 100644
--- a/packages/llamaindex/src/indices/summary/index.ts
+++ b/packages/llamaindex/src/indices/summary/index.ts
@@ -48,11 +48,11 @@ export enum SummaryRetrieverMode {
 }
 
 export interface SummaryIndexOptions {
-  nodes?: BaseNode[];
-  indexStruct?: IndexList;
-  indexId?: string;
-  serviceContext?: ServiceContext;
-  storageContext?: StorageContext;
+  nodes?: BaseNode[] | undefined;
+  indexStruct?: IndexList | undefined;
+  indexId?: string | undefined;
+  serviceContext?: ServiceContext | undefined;
+  storageContext?: StorageContext | undefined;
 }
 
 /**
@@ -83,7 +83,9 @@ export class SummaryIndex extends BaseIndex<IndexList> {
       indexStruct = options.indexStruct;
     } else if (indexStructs.length == 1) {
       indexStruct =
-        indexStructs[0].type === IndexStructType.LIST ? indexStructs[0] : null;
+        indexStructs[0]!.type === IndexStructType.LIST
+          ? indexStructs[0]!
+          : null;
     } else if (indexStructs.length > 1 && options.indexId) {
       indexStruct = (await indexStore.getIndexStruct(
         options.indexId,
@@ -131,8 +133,8 @@ export class SummaryIndex extends BaseIndex<IndexList> {
   static async fromDocuments(
     documents: Document[],
     args: {
-      storageContext?: StorageContext;
-      serviceContext?: ServiceContext;
+      storageContext?: StorageContext | undefined;
+      serviceContext?: ServiceContext | undefined;
     } = {},
   ): Promise<SummaryIndex> {
     let { storageContext, serviceContext } = args;
@@ -312,7 +314,7 @@ export class SummaryIndexLLMRetriever implements BaseRetriever {
   choiceBatchSize: number;
   formatNodeBatchFn: NodeFormatterFunction;
   parseChoiceSelectAnswerFn: ChoiceSelectParserFunction;
-  serviceContext?: ServiceContext;
+  serviceContext?: ServiceContext | undefined;
 
   // eslint-disable-next-line max-params
   constructor(
diff --git a/packages/llamaindex/src/indices/summary/utils.ts b/packages/llamaindex/src/indices/summary/utils.ts
index 3f96cadd65fd36757046be9194b47f0ec31f9279..dc5eae948ea03b09a0cd73c1f71d30480c1f2e69 100644
--- a/packages/llamaindex/src/indices/summary/utils.ts
+++ b/packages/llamaindex/src/indices/summary/utils.ts
@@ -51,8 +51,8 @@ export const defaultParseChoiceSelectAnswerFn: ChoiceSelectParserFunction = (
   return lineTokens.reduce(
     (parseResult: ChoiceSelectParseResult, lineToken: string[]) => {
       try {
-        const docNum = parseInt(lineToken[0].split(":")[1].trim());
-        const answerRelevance = parseFloat(lineToken[1].split(":")[1].trim());
+        const docNum = parseInt(lineToken[0]!.split(":")[1]!.trim());
+        const answerRelevance = parseFloat(lineToken[1]!.split(":")[1]!.trim());
         if (docNum < 1 || docNum > numChoices) {
           if (raiseErr) {
             throw new Error(
diff --git a/packages/llamaindex/src/indices/vectorStore/index.ts b/packages/llamaindex/src/indices/vectorStore/index.ts
index 3959a46781f3a957232c37f2a9414ee4289e7ec0..af769810c2c2cd7423a7fdd8490c3e04c6c0f01e 100644
--- a/packages/llamaindex/src/indices/vectorStore/index.ts
+++ b/packages/llamaindex/src/indices/vectorStore/index.ts
@@ -44,20 +44,20 @@ import { BaseIndex } from "../BaseIndex.js";
 import { IndexDict, IndexStructType } from "../json-to-index-struct.js";
 
 interface IndexStructOptions {
-  indexStruct?: IndexDict;
-  indexId?: string;
+  indexStruct?: IndexDict | undefined;
+  indexId?: string | undefined;
 }
 export interface VectorIndexOptions extends IndexStructOptions {
-  nodes?: BaseNode[];
-  serviceContext?: ServiceContext;
-  storageContext?: StorageContext;
-  vectorStores?: VectorStoreByType;
-  logProgress?: boolean;
+  nodes?: BaseNode[] | undefined;
+  serviceContext?: ServiceContext | undefined;
+  storageContext?: StorageContext | undefined;
+  vectorStores?: VectorStoreByType | undefined;
+  logProgress?: boolean | undefined;
 }
 
 export interface VectorIndexConstructorProps extends BaseIndexInit<IndexDict> {
   indexStore: BaseIndexStore;
-  vectorStores?: VectorStoreByType;
+  vectorStores?: VectorStoreByType | undefined;
 }
 
 /**
@@ -65,7 +65,7 @@ export interface VectorIndexConstructorProps extends BaseIndexInit<IndexDict> {
  */
 export class VectorStoreIndex extends BaseIndex<IndexDict> {
   indexStore: BaseIndexStore;
-  embedModel?: BaseEmbedding;
+  embedModel?: BaseEmbedding | undefined;
   vectorStores: VectorStoreByType;
 
   private constructor(init: VectorIndexConstructorProps) {
@@ -137,7 +137,7 @@ export class VectorStoreIndex extends BaseIndex<IndexDict> {
       indexStruct = options.indexStruct;
     } else if (indexStructs.length == 1) {
       indexStruct =
-        indexStructs[0].type === IndexStructType.SIMPLE_DICT
+        indexStructs[0]!.type === IndexStructType.SIMPLE_DICT
           ? indexStructs[0]
           : undefined;
       indexStruct = indexStructs[0];
@@ -164,7 +164,7 @@ export class VectorStoreIndex extends BaseIndex<IndexDict> {
    */
   async getNodeEmbeddingResults(
     nodes: BaseNode[],
-    options?: { logProgress?: boolean },
+    options?: { logProgress?: boolean | undefined },
   ): Promise<BaseNode[]> {
     const nodeMap = splitNodesByType(nodes);
     for (const type in nodeMap) {
@@ -187,7 +187,7 @@ export class VectorStoreIndex extends BaseIndex<IndexDict> {
    */
   async buildIndexFromNodes(
     nodes: BaseNode[],
-    options?: { logProgress?: boolean },
+    options?: { logProgress?: boolean | undefined },
   ) {
     await this.insertNodes(nodes, options);
   }
@@ -314,13 +314,13 @@ export class VectorStoreIndex extends BaseIndex<IndexDict> {
     // NOTE: if the vector store keeps text,
     // we only need to add image and index nodes
     for (let i = 0; i < nodes.length; ++i) {
-      const { type } = nodes[i];
+      const { type } = nodes[i]!;
       if (
         !vectorStore.storesText ||
         type === ObjectType.INDEX ||
         type === ObjectType.IMAGE
       ) {
-        const nodeWithoutEmbedding = nodes[i].clone();
+        const nodeWithoutEmbedding = nodes[i]!.clone();
         nodeWithoutEmbedding.embedding = undefined;
         this.indexStruct.addNode(nodeWithoutEmbedding, newIds[i]);
         await this.docStore.addDocuments([nodeWithoutEmbedding], true);
@@ -330,7 +330,7 @@ export class VectorStoreIndex extends BaseIndex<IndexDict> {
 
   async insertNodes(
     nodes: BaseNode[],
-    options?: { logProgress?: boolean },
+    options?: { logProgress?: boolean | undefined },
   ): Promise<void> {
     if (!nodes || nodes.length === 0) {
       return;
@@ -384,8 +384,8 @@ type TopKMap = { [P in ModalityType]: number };
 
 export type VectorIndexRetrieverOptions = {
   index: VectorStoreIndex;
-  similarityTopK?: number;
-  topK?: TopKMap;
+  similarityTopK?: number | undefined;
+  topK?: TopKMap | undefined;
   filters?: MetadataFilters;
 };
 
@@ -393,8 +393,8 @@ export class VectorIndexRetriever implements BaseRetriever {
   index: VectorStoreIndex;
   topK: TopKMap;
 
-  serviceContext?: ServiceContext;
-  filters?: MetadataFilters;
+  serviceContext?: ServiceContext | undefined;
+  filters?: MetadataFilters | undefined;
 
   constructor({
     index,
@@ -467,7 +467,7 @@ export class VectorIndexRetriever implements BaseRetriever {
         const result = await vectorStore.query({
           queryEmbedding,
           mode: VectorStoreQueryMode.DEFAULT,
-          similarityTopK: this.topK[type],
+          similarityTopK: this.topK[type]!,
           filters: this.filters ?? filters ?? undefined,
         });
         nodes = nodes.concat(this.buildNodeListFromQueryResult(result));
@@ -480,11 +480,11 @@ export class VectorIndexRetriever implements BaseRetriever {
     const nodesWithScores: NodeWithScore[] = [];
     for (let i = 0; i < result.ids.length; i++) {
       const nodeFromResult = result.nodes?.[i];
-      if (!this.index.indexStruct.nodesDict[result.ids[i]] && nodeFromResult) {
-        this.index.indexStruct.nodesDict[result.ids[i]] = nodeFromResult;
+      if (!this.index.indexStruct.nodesDict[result.ids[i]!] && nodeFromResult) {
+        this.index.indexStruct.nodesDict[result.ids[i]!] = nodeFromResult;
       }
 
-      const node = this.index.indexStruct.nodesDict[result.ids[i]];
+      const node = this.index.indexStruct.nodesDict[result.ids[i]!]!;
       // XXX: Hack, if it's an image node, we reconstruct the image from the URL
       // Alternative: Store image in doc store and retrieve it here
       if (node instanceof ImageNode) {
@@ -493,7 +493,7 @@ export class VectorIndexRetriever implements BaseRetriever {
 
       nodesWithScores.push({
         node: node,
-        score: result.similarities[i],
+        score: result.similarities[i]!,
       });
     }
 
diff --git a/packages/llamaindex/src/ingestion/IngestionPipeline.ts b/packages/llamaindex/src/ingestion/IngestionPipeline.ts
index 8b7f355aab3ad5496f1b355477773e3e20ee77ce..c19e42aad7a89c657b1d5e6a36e0ae1b9432f006 100644
--- a/packages/llamaindex/src/ingestion/IngestionPipeline.ts
+++ b/packages/llamaindex/src/ingestion/IngestionPipeline.ts
@@ -60,13 +60,13 @@ export async function runTransformations(
 
 export class IngestionPipeline {
   transformations: TransformComponent[] = [];
-  documents?: Document[];
-  reader?: BaseReader;
-  vectorStore?: VectorStore;
-  vectorStores?: VectorStoreByType;
+  documents?: Document[] | undefined;
+  reader?: BaseReader | undefined;
+  vectorStore?: VectorStore | undefined;
+  vectorStores?: VectorStoreByType | undefined;
   docStore?: BaseDocumentStore;
   docStoreStrategy: DocStoreStrategy = DocStoreStrategy.UPSERTS;
-  cache?: IngestionCache;
+  cache?: IngestionCache | undefined;
   disableCache: boolean = false;
 
   private _docStoreStrategy?: TransformComponent;
diff --git a/packages/llamaindex/src/ingestion/strategies/UpsertsAndDeleteStrategy.ts b/packages/llamaindex/src/ingestion/strategies/UpsertsAndDeleteStrategy.ts
index 93c6aa49c26570c41d0733618e67b148fdb42126..5668551e8a92ab03b7567d2e03ab99396ba5a53a 100644
--- a/packages/llamaindex/src/ingestion/strategies/UpsertsAndDeleteStrategy.ts
+++ b/packages/llamaindex/src/ingestion/strategies/UpsertsAndDeleteStrategy.ts
@@ -9,7 +9,7 @@ import { classify } from "./classify.js";
  */
 export class UpsertsAndDeleteStrategy extends TransformComponent {
   protected docStore: BaseDocumentStore;
-  protected vectorStores?: VectorStore[];
+  protected vectorStores: VectorStore[] | undefined;
 
   constructor(docStore: BaseDocumentStore, vectorStores?: VectorStore[]) {
     super(async (nodes: BaseNode[]): Promise<BaseNode[]> => {
diff --git a/packages/llamaindex/src/ingestion/strategies/UpsertsStrategy.ts b/packages/llamaindex/src/ingestion/strategies/UpsertsStrategy.ts
index efeae560f8f743431aba8acfa4333a51a0a9583c..7928f4783c4c693941ea6fe0f6d930e22371e62b 100644
--- a/packages/llamaindex/src/ingestion/strategies/UpsertsStrategy.ts
+++ b/packages/llamaindex/src/ingestion/strategies/UpsertsStrategy.ts
@@ -8,7 +8,7 @@ import { classify } from "./classify.js";
  */
 export class UpsertsStrategy extends TransformComponent {
   protected docStore: BaseDocumentStore;
-  protected vectorStores?: VectorStore[];
+  protected vectorStores: VectorStore[] | undefined;
 
   constructor(docStore: BaseDocumentStore, vectorStores?: VectorStore[]) {
     super(async (nodes: BaseNode[]): Promise<BaseNode[]> => {
diff --git a/packages/llamaindex/src/internal/utils.ts b/packages/llamaindex/src/internal/utils.ts
index 10a06b394accecee066736b0462836bbb1bc129e..1e18c474fe1b8e89ba332dde86024c21e21add06 100644
--- a/packages/llamaindex/src/internal/utils.ts
+++ b/packages/llamaindex/src/internal/utils.ts
@@ -63,7 +63,7 @@ export function getTopKEmbeddings(
   const similarities: { similarity: number; id: number }[] = [];
 
   for (let i = 0; i < embeddings.length; i++) {
-    const sim = similarity(queryEmbedding, embeddings[i]);
+    const sim = similarity(queryEmbedding, embeddings[i]!);
     if (similarityCutoff == null || sim > similarityCutoff) {
       similarities.push({ similarity: sim, id: embeddingIds[i] });
     }
@@ -78,8 +78,8 @@ export function getTopKEmbeddings(
     if (i >= similarities.length) {
       break;
     }
-    resultSimilarities.push(similarities[i].similarity);
-    resultIds.push(similarities[i].id);
+    resultSimilarities.push(similarities[i]!.similarity);
+    resultIds.push(similarities[i]!.id);
   }
 
   return [resultSimilarities, resultIds];
@@ -176,7 +176,7 @@ export async function imageToString(input: ImageType): Promise<string> {
 export function stringToImage(input: string): ImageType {
   if (input.startsWith("data:")) {
     // if the input is a base64 data URL, convert it back to a Blob
-    const base64Data = input.split(",")[1];
+    const base64Data = input.split(",")[1]!;
     const byteArray = Buffer.from(base64Data, "base64");
     return new Blob([byteArray]);
   } else if (input.startsWith("http://") || input.startsWith("https://")) {
diff --git a/packages/llamaindex/src/llm/anthropic.ts b/packages/llamaindex/src/llm/anthropic.ts
index 90345ea84189b2b894a5c13c6d5faae27fdbc32a..e3b4abfa69a1ddcb8f136f127c87b5a306c849ad 100644
--- a/packages/llamaindex/src/llm/anthropic.ts
+++ b/packages/llamaindex/src/llm/anthropic.ts
@@ -109,10 +109,10 @@ export class Anthropic extends ToolCallLLM<AnthropicAdditionalChatOptions> {
   model: keyof typeof ALL_AVAILABLE_ANTHROPIC_MODELS;
   temperature: number;
   topP: number;
-  maxTokens?: number;
+  maxTokens?: number | undefined;
 
   // Anthropic session params
-  apiKey?: string = undefined;
+  apiKey?: string | undefined;
   maxRetries: number;
   timeout?: number;
   session: AnthropicSession;
@@ -153,7 +153,7 @@ export class Anthropic extends ToolCallLLM<AnthropicAdditionalChatOptions> {
 
   getModelName = (model: string): string => {
     if (Object.keys(AVAILABLE_ANTHROPIC_MODELS_WITHOUT_DATE).includes(model)) {
-      return AVAILABLE_ANTHROPIC_MODELS_WITHOUT_DATE[model];
+      return AVAILABLE_ANTHROPIC_MODELS_WITHOUT_DATE[model]!;
     }
     return model;
   };
@@ -247,11 +247,11 @@ export class Anthropic extends ToolCallLLM<AnthropicAdditionalChatOptions> {
     const realResult: MessageParam[] = [];
     for (let i = 0; i < result.length; i++) {
       if (i === 0) {
-        realResult.push(result[i]);
+        realResult.push(result[i]!);
         continue;
       }
-      const current = result[i];
-      const previous = result[i - 1];
+      const current = result[i]!;
+      const previous = result[i - 1]!;
       if (current.role === previous.role) {
         // merge two messages with the same role
         if (Array.isArray(previous.content)) {
diff --git a/packages/llamaindex/src/llm/azure.ts b/packages/llamaindex/src/llm/azure.ts
index a91e9c3cc2d252afd43f2f9b1413fcf10c96dc52..7c64513c78a86b88c2a95f8e8e60dc4a330d0811 100644
--- a/packages/llamaindex/src/llm/azure.ts
+++ b/packages/llamaindex/src/llm/azure.ts
@@ -4,7 +4,7 @@ import type { AzureClientOptions } from "openai";
 
 export interface AzureOpenAIConfig extends AzureClientOptions {
   /** @deprecated use "deployment" instead */
-  deploymentName?: string;
+  deploymentName?: string | undefined;
 }
 
 // NOTE we're not supporting the legacy models as they're not available for new deployments
diff --git a/packages/llamaindex/src/llm/gemini/base.ts b/packages/llamaindex/src/llm/gemini/base.ts
index 0b05c2848911a775c32669d55c2101aa93eea119..48d38b5b9db6595ebfec69c58210c0d6e05531e2 100644
--- a/packages/llamaindex/src/llm/gemini/base.ts
+++ b/packages/llamaindex/src/llm/gemini/base.ts
@@ -79,7 +79,7 @@ export class GeminiSession implements IGeminiSession {
 
   constructor(options: GoogleGeminiSessionOptions) {
     if (!options.apiKey) {
-      options.apiKey = getEnv("GOOGLE_API_KEY");
+      options.apiKey = getEnv("GOOGLE_API_KEY")!;
     }
     if (!options.apiKey) {
       throw new Error("Set Google API Key in GOOGLE_API_KEY env variable");
@@ -187,7 +187,7 @@ export class Gemini extends ToolCallLLM<GeminiAdditionalChatOptions> {
   model: GEMINI_MODEL;
   temperature: number;
   topP: number;
-  maxTokens?: number;
+  maxTokens?: number | undefined;
   session: IGeminiSession;
 
   constructor(init?: GeminiConfig) {
@@ -219,19 +219,26 @@ export class Gemini extends ToolCallLLM<GeminiAdditionalChatOptions> {
   ): Promise<GeminiChatNonStreamResponse> {
     const context = getChatContext(params);
     const client = this.session.getGenerativeModel(this.metadata);
-    const chat = client.startChat({
-      history: context.history,
-      tools: params.tools && [
-        {
-          functionDeclarations: params.tools.map(
-            mapBaseToolToGeminiFunctionDeclaration,
-          ),
-        },
-      ],
-      safetySettings: DEFAULT_SAFETY_SETTINGS,
-    });
+    const chat = client.startChat(
+      params.tools
+        ? {
+            history: context.history,
+            tools: [
+              {
+                functionDeclarations: params.tools.map(
+                  mapBaseToolToGeminiFunctionDeclaration,
+                ),
+              },
+            ],
+            safetySettings: DEFAULT_SAFETY_SETTINGS,
+          }
+        : {
+            history: context.history,
+            safetySettings: DEFAULT_SAFETY_SETTINGS,
+          },
+    );
     const { response } = await chat.sendMessage(context.message);
-    const topCandidate = response.candidates![0];
+    const topCandidate = response.candidates![0]!;
 
     const tools = this.session.getToolsFromResponse(response);
     const options: ToolCallLLMMessageOptions = tools?.length
@@ -255,17 +262,24 @@ export class Gemini extends ToolCallLLM<GeminiAdditionalChatOptions> {
   ): GeminiChatStreamResponse {
     const context = getChatContext(params);
     const client = this.session.getGenerativeModel(this.metadata);
-    const chat = client.startChat({
-      history: context.history,
-      tools: params.tools && [
-        {
-          functionDeclarations: params.tools.map(
-            mapBaseToolToGeminiFunctionDeclaration,
-          ),
-        },
-      ],
-      safetySettings: DEFAULT_SAFETY_SETTINGS,
-    });
+    const chat = client.startChat(
+      params.tools
+        ? {
+            history: context.history,
+            tools: [
+              {
+                functionDeclarations: params.tools.map(
+                  mapBaseToolToGeminiFunctionDeclaration,
+                ),
+              },
+            ],
+            safetySettings: DEFAULT_SAFETY_SETTINGS,
+          }
+        : {
+            history: context.history,
+            safetySettings: DEFAULT_SAFETY_SETTINGS,
+          },
+    );
     const result = await chat.sendMessageStream(context.message);
     yield* this.session.getChatStream(result);
   }
diff --git a/packages/llamaindex/src/llm/gemini/utils.ts b/packages/llamaindex/src/llm/gemini/utils.ts
index 85dc53a364d152845aa251e958e8d79754f9b86f..f064959f122f518ab1e24aa10635ffea68922c7f 100644
--- a/packages/llamaindex/src/llm/gemini/utils.ts
+++ b/packages/llamaindex/src/llm/gemini/utils.ts
@@ -103,7 +103,7 @@ export const getPartsText = (parts: Part[]): string => {
  * Returns all text found in all parts of first candidate.
  */
 export const getText = (response: GenerateContentResponse): string => {
-  if (response.candidates?.[0].content?.parts) {
+  if (response.candidates?.[0]!.content?.parts) {
     return getPartsText(response.candidates?.[0].content?.parts);
   }
   return "";
@@ -150,7 +150,7 @@ export const getChatContext = (
   ).map(cleanParts);
 
   const history = messages.slice(0, -1);
-  const message = messages[messages.length - 1].parts;
+  const message = messages[messages.length - 1]!.parts;
   return {
     history,
     message,
@@ -210,9 +210,9 @@ export class GeminiHelper {
           index: number,
           original: GeminiMessageContent[],
         ) => {
-          if (index > 0 && original[index - 1].role === current.role) {
-            result[result.length - 1].parts = [
-              ...result[result.length - 1].parts,
+          if (index > 0 && original[index - 1]!.role === current.role) {
+            result[result.length - 1]!.parts = [
+              ...result[result.length - 1]!.parts,
               ...current.parts,
             ];
           } else {
@@ -297,7 +297,7 @@ export function getFunctionCalls(
   response: GenerateContentResponse,
 ): FunctionCall[] | undefined {
   const functionCalls: FunctionCall[] = [];
-  if (response.candidates?.[0].content?.parts) {
+  if (response.candidates?.[0]!.content?.parts) {
     for (const part of response.candidates?.[0].content?.parts) {
       if (part.functionCall) {
         functionCalls.push(part.functionCall);
diff --git a/packages/llamaindex/src/llm/huggingface.ts b/packages/llamaindex/src/llm/huggingface.ts
index aa9f8aa2e806a5cf0912b80a2d598442200be6df..86fb40529d328daa0a2854e632090fb5bec7c6c7 100644
--- a/packages/llamaindex/src/llm/huggingface.ts
+++ b/packages/llamaindex/src/llm/huggingface.ts
@@ -81,7 +81,7 @@ export class HuggingFaceInferenceAPI extends BaseLLM {
   model: string;
   temperature: number;
   topP: number;
-  maxTokens?: number;
+  maxTokens?: number | undefined;
   contextWindow: number;
   hf: HfInference;
 
@@ -197,7 +197,7 @@ export class HuggingFaceLLM extends BaseLLM {
   tokenizerName: string;
   temperature: number;
   topP: number;
-  maxTokens?: number;
+  maxTokens?: number | undefined;
   contextWindow: number;
 
   private tokenizer: PreTrainedTokenizer | null = null;
diff --git a/packages/llamaindex/src/llm/mistral.ts b/packages/llamaindex/src/llm/mistral.ts
index b3d7d4ebb00415f9aa605b4dbef68c97759bf078..fa60cd768a292fec2f6bb815a7f6f0d6d7937f90 100644
--- a/packages/llamaindex/src/llm/mistral.ts
+++ b/packages/llamaindex/src/llm/mistral.ts
@@ -15,14 +15,14 @@ export const ALL_AVAILABLE_MISTRAL_MODELS = {
 };
 
 export class MistralAISession {
-  apiKey?: string;
+  apiKey: string;
   private client: any;
 
-  constructor(init?: Partial<MistralAISession>) {
+  constructor(init?: { apiKey?: string | undefined }) {
     if (init?.apiKey) {
       this.apiKey = init?.apiKey;
     } else {
-      this.apiKey = getEnv("MISTRAL_API_KEY");
+      this.apiKey = getEnv("MISTRAL_API_KEY")!;
     }
     if (!this.apiKey) {
       throw new Error("Set Mistral API key in MISTRAL_API_KEY env variable"); // Overriding MistralAI package's error message
@@ -48,10 +48,10 @@ export class MistralAI extends BaseLLM {
   model: keyof typeof ALL_AVAILABLE_MISTRAL_MODELS;
   temperature: number;
   topP: number;
-  maxTokens?: number;
+  maxTokens?: number | undefined;
   apiKey?: string;
   safeMode: boolean;
-  randomSeed?: number;
+  randomSeed?: number | undefined;
 
   private session: MistralAISession;
 
diff --git a/packages/llamaindex/src/llm/openai.ts b/packages/llamaindex/src/llm/openai.ts
index 198fedd1dc39edce07c3d4ecef49bc49b4e359a2..eb8ac39c1788c0dfc49f7969bcf935be1b355e71 100644
--- a/packages/llamaindex/src/llm/openai.ts
+++ b/packages/llamaindex/src/llm/openai.ts
@@ -172,18 +172,17 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
     | (string & {});
   temperature: number;
   topP: number;
-  maxTokens?: number;
-  additionalChatOptions?: OpenAIAdditionalChatOptions;
+  maxTokens?: number | undefined;
+  additionalChatOptions?: OpenAIAdditionalChatOptions | undefined;
 
   // OpenAI session params
-  apiKey?: string = undefined;
+  apiKey?: string | undefined = undefined;
   maxRetries: number;
   timeout?: number;
   session: OpenAISession;
-  additionalSessionOptions?: Omit<
-    Partial<OpenAIClientOptions>,
-    "apiKey" | "maxRetries" | "timeout"
-  >;
+  additionalSessionOptions?:
+    | undefined
+    | Omit<Partial<OpenAIClientOptions>, "apiKey" | "maxRetries" | "timeout">;
 
   constructor(
     init?: Partial<OpenAI> & {
@@ -342,7 +341,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
     | AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
   > {
     const { messages, stream, tools, additionalChatOptions } = params;
-    const baseRequestParams: OpenAILLM.Chat.ChatCompletionCreateParams = {
+    const baseRequestParams = <OpenAILLM.Chat.ChatCompletionCreateParams>{
       model: this.model,
       temperature: this.temperature,
       max_tokens: this.maxTokens,
@@ -371,16 +370,16 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
       stream: false,
     });
 
-    const content = response.choices[0].message?.content ?? "";
+    const content = response.choices[0]!.message?.content ?? "";
 
     return {
       raw: response,
       message: {
         content,
-        role: response.choices[0].message.role,
-        options: response.choices[0].message?.tool_calls
+        role: response.choices[0]!.message.role,
+        options: response.choices[0]!.message?.tool_calls
           ? {
-              toolCall: response.choices[0].message.tool_calls.map(
+              toolCall: response.choices[0]!.message.tool_calls.map(
                 (toolCall) => ({
                   id: toolCall.id,
                   name: toolCall.function.name,
@@ -410,13 +409,13 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
     const toolCallMap = new Map<string, PartialToolCall>();
     for await (const part of stream) {
       if (part.choices.length === 0) continue;
-      const choice = part.choices[0];
+      const choice = part.choices[0]!;
       // skip parts that don't have any content
       if (!(choice.delta.content || choice.delta.tool_calls)) continue;
 
       let shouldEmitToolCall: PartialToolCall | null = null;
       if (
-        choice.delta.tool_calls?.[0].id &&
+        choice.delta.tool_calls?.[0]!.id &&
         currentToolCall &&
         choice.delta.tool_calls?.[0].id !== currentToolCall.id
       ) {
@@ -425,7 +424,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
           input: JSON.parse(currentToolCall.input),
         };
       }
-      if (choice.delta.tool_calls?.[0].id) {
+      if (choice.delta.tool_calls?.[0]!.id) {
         currentToolCall = {
           name: choice.delta.tool_calls[0].function!.name!,
           id: choice.delta.tool_calls[0].id,
@@ -433,7 +432,7 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
         };
         toolCallMap.set(choice.delta.tool_calls[0].id, currentToolCall);
       } else {
-        if (choice.delta.tool_calls?.[0].function?.arguments) {
+        if (choice.delta.tool_calls?.[0]!.function?.arguments) {
           currentToolCall!.input +=
             choice.delta.tool_calls[0].function.arguments;
         }
@@ -468,11 +467,16 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
   static toTool(tool: BaseTool): ChatCompletionTool {
     return {
       type: "function",
-      function: {
-        name: tool.metadata.name,
-        description: tool.metadata.description,
-        parameters: tool.metadata.parameters,
-      },
+      function: tool.metadata.parameters
+        ? {
+            name: tool.metadata.name,
+            description: tool.metadata.description,
+            parameters: tool.metadata.parameters,
+          }
+        : {
+            name: tool.metadata.name,
+            description: tool.metadata.description,
+          },
     };
   }
 }
diff --git a/packages/llamaindex/src/llm/portkey.ts b/packages/llamaindex/src/llm/portkey.ts
index b45762fc62949523de7916a5656d7e927a8d208b..19aa17a3cfec6ac55ae396d5a84ed28182e35e43 100644
--- a/packages/llamaindex/src/llm/portkey.ts
+++ b/packages/llamaindex/src/llm/portkey.ts
@@ -20,7 +20,7 @@ export class PortkeySession {
 
   constructor(options: PortkeyOptions = {}) {
     if (!options.apiKey) {
-      options.apiKey = getEnv("PORTKEY_API_KEY");
+      options.apiKey = getEnv("PORTKEY_API_KEY")!;
     }
 
     if (!options.baseURL) {
@@ -60,8 +60,8 @@ export function getPortkeySession(options: PortkeyOptions = {}) {
 }
 
 export class Portkey extends BaseLLM {
-  apiKey?: string = undefined;
-  baseURL?: string = undefined;
+  apiKey?: string | undefined = undefined;
+  baseURL?: string | undefined = undefined;
   session: PortkeySession;
 
   constructor(init?: Partial<Portkey> & PortkeyOptions) {
@@ -101,8 +101,8 @@ export class Portkey extends BaseLLM {
         ...bodyParams,
       });
 
-      const content = response.choices[0].message?.content ?? "";
-      const role = response.choices[0].message?.role || "assistant";
+      const content = response.choices[0]!.message?.content ?? "";
+      const role = response.choices[0]!.message?.role || "assistant";
       return { raw: response, message: { content, role: role as MessageType } };
     }
   }
@@ -123,11 +123,11 @@ export class Portkey extends BaseLLM {
     //Indices
     let idx_counter: number = 0;
     for await (const part of chunkStream) {
-      part.choices[0].index = idx_counter;
+      part.choices[0]!.index = idx_counter;
 
       idx_counter++;
 
-      yield { raw: part, delta: part.choices[0].delta?.content ?? "" };
+      yield { raw: part, delta: part.choices[0]!.delta?.content ?? "" };
     }
     return;
   }
diff --git a/packages/llamaindex/src/llm/replicate_ai.ts b/packages/llamaindex/src/llm/replicate_ai.ts
index 80a532f3f91d73ce43def0bf724717560678856c..e39e8b7d926a69d690803c38b979cf3d718ac295 100644
--- a/packages/llamaindex/src/llm/replicate_ai.ts
+++ b/packages/llamaindex/src/llm/replicate_ai.ts
@@ -183,8 +183,8 @@ export class ReplicateLLM extends BaseLLM {
           if (typeof message.content === "string") {
             content = message.content;
           } else {
-            if (message.content[0].type === "text") {
-              content = message.content[0].text;
+            if (message.content[0]!.type === "text") {
+              content = message.content[0]!.text;
             } else {
               content = "";
             }
@@ -258,7 +258,7 @@ If a question does not make any sense, or is not factually coherent, explain why
     messages = [...messages]; // so we can use shift without mutating the original array
 
     let systemPrompt = undefined;
-    if (messages[0].role === "system") {
+    if (messages[0]!.role === "system") {
       const systemMessage = messages.shift()!;
 
       if (replicate4Bit) {
@@ -274,13 +274,13 @@ If a question does not make any sense, or is not factually coherent, explain why
           );
         }
 
-        const userContent = messages[0].content;
+        const userContent = messages[0]!.content;
 
-        messages[0].content = `${systemStr}${userContent}`;
+        messages[0]!.content = `${systemStr}${userContent}`;
       }
     } else {
       if (!replicate4Bit) {
-        messages[0].content = `${B_SYS}${DEFAULT_SYSTEM_PROMPT}${E_SYS}${messages[0].content}`;
+        messages[0]!.content = `${B_SYS}${DEFAULT_SYSTEM_PROMPT}${E_SYS}${messages[0]!.content}`;
       }
     }
 
diff --git a/packages/llamaindex/src/objects/base.ts b/packages/llamaindex/src/objects/base.ts
index a13095d452e26fcd83ea2f67b6f2813c09d175a6..5737a351fa1022fe48dfcab16f1663d790a0c029 100644
--- a/packages/llamaindex/src/objects/base.ts
+++ b/packages/llamaindex/src/objects/base.ts
@@ -119,7 +119,7 @@ export class SimpleToolNodeMapping extends BaseObjectNodeMapping {
     if (!node.metadata) {
       throw new Error("Metadata must be set");
     }
-    return this._tools[node.metadata.name];
+    return this._tools[node.metadata.name]!;
   }
 
   persist(persistDir: string, objNodeMappingFilename: string): void {
diff --git a/packages/llamaindex/src/postprocessors/SimilarityPostprocessor.ts b/packages/llamaindex/src/postprocessors/SimilarityPostprocessor.ts
index 92df463b52013d88a799a104ff9e1f63b54d13d4..705af5d154704baa85f7404d619f77a33d826ae9 100644
--- a/packages/llamaindex/src/postprocessors/SimilarityPostprocessor.ts
+++ b/packages/llamaindex/src/postprocessors/SimilarityPostprocessor.ts
@@ -2,9 +2,9 @@ import type { NodeWithScore } from "@llamaindex/core/schema";
 import type { BaseNodePostprocessor } from "./types.js";
 
 export class SimilarityPostprocessor implements BaseNodePostprocessor {
-  similarityCutoff?: number;
+  similarityCutoff?: number | undefined;
 
-  constructor(options?: { similarityCutoff?: number }) {
+  constructor(options?: { similarityCutoff?: number | undefined }) {
     this.similarityCutoff = options?.similarityCutoff;
   }
 
diff --git a/packages/llamaindex/src/postprocessors/rerankers/CohereRerank.ts b/packages/llamaindex/src/postprocessors/rerankers/CohereRerank.ts
index f2288364c4cc18b941ba48c9b20cfd441fb29bca..d8213852bb1dbea97babdac5927a8a8adee1a069 100644
--- a/packages/llamaindex/src/postprocessors/rerankers/CohereRerank.ts
+++ b/packages/llamaindex/src/postprocessors/rerankers/CohereRerank.ts
@@ -72,7 +72,7 @@ export class CohereRerank implements BaseNodePostprocessor {
     const newNodes: NodeWithScore[] = [];
 
     for (const result of results.results) {
-      const node = nodes[result.index];
+      const node = nodes[result.index]!;
 
       newNodes.push({
         node: node.node,
diff --git a/packages/llamaindex/src/postprocessors/rerankers/JinaAIReranker.ts b/packages/llamaindex/src/postprocessors/rerankers/JinaAIReranker.ts
index 59dd3b92ef3fb60f66006b74be8f8184283921fd..324c36bf503a0313235b6e319d2242f36f429c94 100644
--- a/packages/llamaindex/src/postprocessors/rerankers/JinaAIReranker.ts
+++ b/packages/llamaindex/src/postprocessors/rerankers/JinaAIReranker.ts
@@ -21,7 +21,7 @@ export class JinaAIReranker implements BaseNodePostprocessor {
   constructor(init?: Partial<JinaAIReranker>) {
     this.topN = init?.topN ?? 2;
     this.model = init?.model ?? "jina-reranker-v1-base-en";
-    this.apiKey = getEnv("JINAAI_API_KEY");
+    this.apiKey = getEnv("JINAAI_API_KEY")!;
 
     if (!this.apiKey) {
       throw new Error(
@@ -79,7 +79,7 @@ export class JinaAIReranker implements BaseNodePostprocessor {
     const newNodes: NodeWithScore[] = [];
 
     for (const result of results) {
-      const node = nodes[result.index];
+      const node = nodes[result.index]!;
       newNodes.push({
         node: node.node,
         score: result.relevance_score,
diff --git a/packages/llamaindex/src/postprocessors/rerankers/MixedbreadAIReranker.ts b/packages/llamaindex/src/postprocessors/rerankers/MixedbreadAIReranker.ts
index 333744d263347df0a7b57487b34d8d627381ea93..fae0ca10a5e74013659c7fe0f0ab87551c04e1f4 100644
--- a/packages/llamaindex/src/postprocessors/rerankers/MixedbreadAIReranker.ts
+++ b/packages/llamaindex/src/postprocessors/rerankers/MixedbreadAIReranker.ts
@@ -103,16 +103,26 @@ export class MixedbreadAIReranker implements BaseNodePostprocessor {
         "user-agent": "@mixedbread-ai/llamaindex-ts-sdk",
       },
     };
-    this.client = new MixedbreadAIClient({
-      apiKey: apiKey,
-      environment: params?.baseUrl,
-    });
+    this.client = new MixedbreadAIClient(
+      params?.baseUrl
+        ? {
+            apiKey,
+            environment: params?.baseUrl,
+          }
+        : {
+            apiKey,
+          },
+    );
     this.requestParams = {
       model: params?.model ?? "default",
       returnInput: params?.returnInput ?? false,
-      topK: params?.topK,
-      rankFields: params?.rankFields,
     };
+    if (params?.topK) {
+      this.requestParams.topK = params.topK;
+    }
+    if (params?.rankFields) {
+      this.requestParams.rankFields = params.rankFields;
+    }
   }
 
   /**
@@ -152,7 +162,7 @@ export class MixedbreadAIReranker implements BaseNodePostprocessor {
 
     const newNodes: NodeWithScore[] = [];
     for (const document of result.data) {
-      const node = nodes[document.index];
+      const node = nodes[document.index]!;
       node.score = document.score;
       newNodes.push(node);
     }
diff --git a/packages/llamaindex/src/readers/AssemblyAIReader.ts b/packages/llamaindex/src/readers/AssemblyAIReader.ts
index ee368682dd109903d5d2e10ad759fd4fe2db08fe..d64f0f57a97c4b8e7fbe1778e79b2d08d3804505 100644
--- a/packages/llamaindex/src/readers/AssemblyAIReader.ts
+++ b/packages/llamaindex/src/readers/AssemblyAIReader.ts
@@ -35,7 +35,7 @@ abstract class AssemblyAIReader implements BaseReader {
       options = {};
     }
     if (!options.apiKey) {
-      options.apiKey = getEnv("ASSEMBLYAI_API_KEY");
+      options.apiKey = getEnv("ASSEMBLYAI_API_KEY")!;
     }
     if (!options.apiKey) {
       throw new Error(
@@ -79,7 +79,7 @@ class AudioTranscriptReader extends AssemblyAIReader {
    */
   async loadData(params: TranscribeParams | string): Promise<Document[]> {
     const transcript = await this.transcribeOrGetTranscript(params);
-    return [new Document({ text: transcript.text || undefined })];
+    return [new Document({ text: transcript.text ?? undefined })];
   }
 }
 
diff --git a/packages/llamaindex/src/readers/CSVReader.ts b/packages/llamaindex/src/readers/CSVReader.ts
index eecb6a5bc4d0eaf6307407642ebca86ae2cca55d..7f3d584c4340c891c53ef903f44c4846b470a46d 100644
--- a/packages/llamaindex/src/readers/CSVReader.ts
+++ b/packages/llamaindex/src/readers/CSVReader.ts
@@ -11,7 +11,7 @@ export class PapaCSVReader extends FileReader {
   private concatRows: boolean;
   private colJoiner: string;
   private rowJoiner: string;
-  private papaConfig?: ParseConfig;
+  private papaConfig: ParseConfig | undefined;
 
   /**
    * Constructs a new instance of the class.
diff --git a/packages/llamaindex/src/readers/LlamaParseReader.ts b/packages/llamaindex/src/readers/LlamaParseReader.ts
index b45bb9fc7761be15b7624214a5a32951d0bfb898..145b529e95be61cef14ea7c3f06163296f8bbf97 100644
--- a/packages/llamaindex/src/readers/LlamaParseReader.ts
+++ b/packages/llamaindex/src/readers/LlamaParseReader.ts
@@ -201,31 +201,31 @@ export class LlamaParseReader extends FileReader {
   // The language of the text to parse.
   language: Language = "en";
   // The parsing instruction for the parser. Backend default is an empty string.
-  parsingInstruction?: string;
+  parsingInstruction?: string | undefined;
   // Wether to ignore diagonal text (when the text rotation in degrees is not 0, 90, 180 or 270, so not a horizontal or vertical text). Backend default is false.
-  skipDiagonalText?: boolean;
+  skipDiagonalText?: boolean | undefined;
   // Wheter to ignore the cache and re-process the document. All documents are kept in cache for 48hours after the job was completed to avoid processing the same document twice. Backend default is false.
-  invalidateCache?: boolean;
+  invalidateCache?: boolean | undefined;
   // Wether the document should not be cached in the first place. Backend default is false.
-  doNotCache?: boolean;
+  doNotCache?: boolean | undefined;
   // Wether to use a faster mode to extract text from documents. This mode will skip OCR of images, and table/heading reconstruction. Note: Non-compatible with gpt4oMode. Backend default is false.
-  fastMode?: boolean;
+  fastMode?: boolean | undefined;
   // Wether to keep column in the text according to document layout. Reduce reconstruction accuracy, and LLM's/embedings performances in most cases.
-  doNotUnrollColumns?: boolean;
+  doNotUnrollColumns?: boolean | undefined;
   // A templated page separator to use to split the text. If the results contain `{page_number}` (e.g. JSON mode), it will be replaced by the next page number. If not set the default separator '\\n---\\n' will be used.
-  pageSeparator?: string;
+  pageSeparator?: string | undefined;
   //A templated prefix to add to the beginning of each page. If the results contain `{page_number}`, it will be replaced by the page number.>
-  pagePrefix?: string;
+  pagePrefix?: string | undefined;
   // A templated suffix to add to the end of each page. If the results contain `{page_number}`, it will be replaced by the page number.
-  pageSuffix?: string;
+  pageSuffix?: string | undefined;
   // Deprecated. Use vendorMultimodal params. Whether to use gpt-4o to extract text from documents.
   gpt4oMode: boolean = false;
   // Deprecated. Use vendorMultimodal params. The API key for the GPT-4o API. Optional, lowers the cost of parsing. Can be set as an env variable: LLAMA_CLOUD_GPT4O_API_KEY.
-  gpt4oApiKey?: string;
+  gpt4oApiKey?: string | undefined;
   // The bounding box to use to extract text from documents. Describe as a string containing the bounding box margins.
-  boundingBox?: string;
+  boundingBox?: string | undefined;
   // The target pages to extract text from documents. Describe as a comma separated list of page numbers. The first page of the document is page 0
-  targetPages?: string;
+  targetPages?: string | undefined;
   // Whether or not to ignore and skip errors raised during parsing.
   ignoreErrors: boolean = true;
   // Whether to split by page using the pageSeparator or '\n---\n' as default.
@@ -233,12 +233,16 @@ export class LlamaParseReader extends FileReader {
   // Whether to use the vendor multimodal API.
   useVendorMultimodalModel: boolean = false;
   // The model name for the vendor multimodal API
-  vendorMultimodalModelName?: string;
+  vendorMultimodalModelName?: string | undefined;
   // The API key for the multimodal API. Can also be set as an env variable: LLAMA_CLOUD_VENDOR_MULTIMODAL_API_KEY
-  vendorMultimodalApiKey?: string;
+  vendorMultimodalApiKey?: string | undefined;
   // numWorkers is implemented in SimpleDirectoryReader
 
-  constructor(params: Partial<LlamaParseReader> = {}) {
+  constructor(
+    params: Partial<LlamaParseReader> & {
+      apiKey?: string | undefined;
+    } = {},
+  ) {
     super();
     Object.assign(this, params);
     params.apiKey = params.apiKey ?? getEnv("LLAMA_CLOUD_API_KEY");
diff --git a/packages/llamaindex/src/readers/NotionReader.ts b/packages/llamaindex/src/readers/NotionReader.ts
index 8b2db4239a6cbe5adac4d76464b8fd254161c589..313423facdab3d5b556736c26cd2ca3b1b066b65 100644
--- a/packages/llamaindex/src/readers/NotionReader.ts
+++ b/packages/llamaindex/src/readers/NotionReader.ts
@@ -19,8 +19,8 @@ export class NotionReader implements BaseReader {
    * Constructor for the NotionReader class
    * @param {NotionReaderOptions} options - Configuration options for the reader
    */
-  constructor({ client, serializers }: NotionReaderOptions) {
-    this.crawl = crawler({ client, serializers });
+  constructor(options: NotionReaderOptions) {
+    this.crawl = crawler(options);
   }
 
   /**
diff --git a/packages/llamaindex/src/readers/SimpleDirectoryReader.edge.ts b/packages/llamaindex/src/readers/SimpleDirectoryReader.edge.ts
index 938eb89e3d72ebdaf92fda9e46b49d1459f89782..c3f5554fb4942cd5567f1aef039b64ddb6754fa7 100644
--- a/packages/llamaindex/src/readers/SimpleDirectoryReader.edge.ts
+++ b/packages/llamaindex/src/readers/SimpleDirectoryReader.edge.ts
@@ -79,7 +79,7 @@ export class SimpleDirectoryReader implements BaseReader {
       filePathQueue.push(filePath);
     }
 
-    const processFileParams: ProcessFileParams = {
+    const processFileParams = <ProcessFileParams>{
       defaultReader,
       fileExtToReader,
       overrideReader,
@@ -120,7 +120,7 @@ export class SimpleDirectoryReader implements BaseReader {
       if (params.overrideReader) {
         reader = params.overrideReader;
       } else if (params.fileExtToReader && fileExt in params.fileExtToReader) {
-        reader = params.fileExtToReader[fileExt];
+        reader = params.fileExtToReader[fileExt]!;
       } else if (params.defaultReader != null) {
         reader = params.defaultReader;
       } else {
diff --git a/packages/llamaindex/src/storage/chatStore/SimpleChatStore.ts b/packages/llamaindex/src/storage/chatStore/SimpleChatStore.ts
index b094a961ed133c0262b733174c3bb59a36201421..0f13ed6fbf47d2445cf3517975a66c56b2d2a403 100644
--- a/packages/llamaindex/src/storage/chatStore/SimpleChatStore.ts
+++ b/packages/llamaindex/src/storage/chatStore/SimpleChatStore.ts
@@ -34,7 +34,7 @@ export class SimpleChatStore<
     if (!(key in this.store)) {
       return null;
     }
-    const messages = this.store[key];
+    const messages = this.store[key]!;
     delete this.store[key];
     return messages;
   }
@@ -43,10 +43,10 @@ export class SimpleChatStore<
     if (!(key in this.store)) {
       return null;
     }
-    if (idx >= this.store[key].length) {
+    if (idx >= this.store[key]!.length) {
       return null;
     }
-    return this.store[key].splice(idx, 1)[0];
+    return this.store[key]!.splice(idx, 1)[0]!;
   }
 
   public deleteLastMessage(key: string) {
@@ -54,7 +54,7 @@ export class SimpleChatStore<
       return null;
     }
 
-    const lastMessage = this.store[key].pop();
+    const lastMessage = this.store[key]!.pop();
 
     return lastMessage || null;
   }
diff --git a/packages/llamaindex/src/storage/docStore/KVDocumentStore.ts b/packages/llamaindex/src/storage/docStore/KVDocumentStore.ts
index 0e65af62ae90c5f33c088487562e9bba17901692..223969b61cc09b4a9147047b0d95723a57fcadcc 100644
--- a/packages/llamaindex/src/storage/docStore/KVDocumentStore.ts
+++ b/packages/llamaindex/src/storage/docStore/KVDocumentStore.ts
@@ -42,7 +42,7 @@ export class KVDocumentStore extends BaseDocumentStore {
     allowUpdate: boolean = true,
   ): Promise<void> {
     for (let idx = 0; idx < docs.length; idx++) {
-      const doc = docs[idx];
+      const doc = docs[idx]!;
       if (doc.id_ === null) {
         throw new Error("doc_id not set");
       }
diff --git a/packages/llamaindex/src/storage/docStore/types.ts b/packages/llamaindex/src/storage/docStore/types.ts
index eed71e34351029d3a8c00f5e6fbc8af25b14341d..8125322e5d921f4950cb77f039481d5c68ddc905 100644
--- a/packages/llamaindex/src/storage/docStore/types.ts
+++ b/packages/llamaindex/src/storage/docStore/types.ts
@@ -65,7 +65,7 @@ export abstract class BaseDocumentStore {
   }): Promise<Record<number, BaseNode>> {
     const result: Record<number, BaseNode> = {};
     for (const index in nodeIdDict) {
-      result[index] = await this.getNode(nodeIdDict[index]);
+      result[index] = await this.getNode(nodeIdDict[index]!);
     }
     return result;
   }
diff --git a/packages/llamaindex/src/storage/kvStore/SimpleKVStore.ts b/packages/llamaindex/src/storage/kvStore/SimpleKVStore.ts
index 38b3335285c7fe9bf46064fb9a17550f833f92c1..8332bf7e69d37d9c03cec220488cbfc4c06e5cd2 100644
--- a/packages/llamaindex/src/storage/kvStore/SimpleKVStore.ts
+++ b/packages/llamaindex/src/storage/kvStore/SimpleKVStore.ts
@@ -20,7 +20,7 @@ export class SimpleKVStore extends BaseKVStore {
     if (!(collection in this.data)) {
       this.data[collection] = {};
     }
-    this.data[collection][key] = structuredClone(val); // Creating a shallow copy of the object
+    this.data[collection]![key] = structuredClone(val); // Creating a shallow copy of the object
 
     if (this.persistPath) {
       await this.persist(this.persistPath);
@@ -42,15 +42,15 @@ export class SimpleKVStore extends BaseKVStore {
   }
 
   async getAll(collection: string = DEFAULT_COLLECTION): Promise<DataType> {
-    return structuredClone(this.data[collection]); // Creating a shallow copy of the object
+    return structuredClone(this.data[collection]!); // Creating a shallow copy of the object
   }
 
   async delete(
     key: string,
     collection: string = DEFAULT_COLLECTION,
   ): Promise<boolean> {
-    if (key in this.data[collection]) {
-      delete this.data[collection][key];
+    if (key in this.data[collection]!) {
+      delete this.data[collection]![key];
       if (this.persistPath) {
         await this.persist(this.persistPath);
       }
diff --git a/packages/llamaindex/src/storage/vectorStore/AstraDBVectorStore.ts b/packages/llamaindex/src/storage/vectorStore/AstraDBVectorStore.ts
index bc1da63a5b9a7aec291fa679cb45c0bef4e92bc3..552b94f6c659e50cbf4f8991e2d6ad16ece18aa8 100644
--- a/packages/llamaindex/src/storage/vectorStore/AstraDBVectorStore.ts
+++ b/packages/llamaindex/src/storage/vectorStore/AstraDBVectorStore.ts
@@ -1,4 +1,9 @@
-import { Collection, DataAPIClient, Db } from "@datastax/astra-db-ts";
+import {
+  Collection,
+  DataAPIClient,
+  Db,
+  type FindOptions,
+} from "@datastax/astra-db-ts";
 import type { BaseNode } from "@llamaindex/core/schema";
 import { MetadataMode } from "@llamaindex/core/schema";
 import { getEnv } from "@llamaindex/env";
@@ -183,7 +188,7 @@ export class AstraDBVectorStore
       filters[f.key] = f.value;
     });
 
-    const cursor = await collection.find(filters, {
+    const cursor = await collection.find(filters, <FindOptions>{
       ...options,
       sort: query.queryEmbedding
         ? { $vector: query.queryEmbedding }
diff --git a/packages/llamaindex/src/storage/vectorStore/ChromaVectorStore.ts b/packages/llamaindex/src/storage/vectorStore/ChromaVectorStore.ts
index da2e70a6e1b717951e062e394258ab5d4fcbe225..46c319bb4d6e6c37f8d3f2879e0d2cb704cb2092 100644
--- a/packages/llamaindex/src/storage/vectorStore/ChromaVectorStore.ts
+++ b/packages/llamaindex/src/storage/vectorStore/ChromaVectorStore.ts
@@ -1,14 +1,17 @@
 import type { BaseNode } from "@llamaindex/core/schema";
 import { MetadataMode } from "@llamaindex/core/schema";
-import type {
-  AddParams,
-  ChromaClientParams,
-  Collection,
-  QueryResponse,
-  Where,
-  WhereDocument,
+import {
+  ChromaClient,
+  IncludeEnum,
+  type AddParams,
+  type ChromaClientParams,
+  type Collection,
+  type DeleteParams,
+  type QueryParams,
+  type QueryResponse,
+  type Where,
+  type WhereDocument,
 } from "chromadb";
-import { ChromaClient, IncludeEnum } from "chromadb";
 import {
   VectorStoreBase,
   VectorStoreQueryMode,
@@ -96,7 +99,7 @@ export class ChromaVectorStore
     deleteOptions?: ChromaDeleteOptions,
   ): Promise<void> {
     const collection = await this.getCollection();
-    await collection.delete({
+    await collection.delete(<DeleteParams>{
       ids: [refDocId],
       where: deleteOptions?.where,
       whereDocument: deleteOptions?.whereDocument,
@@ -144,7 +147,7 @@ export class ChromaVectorStore
     }
 
     const collection = await this.getCollection();
-    const queryResponse: QueryResponse = await collection.query({
+    const queryResponse: QueryResponse = await collection.query(<QueryParams>{
       queryEmbeddings: query.queryEmbedding ?? undefined,
       queryTexts: query.queryStr ?? undefined,
       nResults: query.similarityTopK,
@@ -160,17 +163,17 @@ export class ChromaVectorStore
     });
 
     const vectorStoreQueryResult: VectorStoreQueryResult = {
-      nodes: queryResponse.ids[0].map((id, index) => {
-        const text = (queryResponse.documents as string[][])[0][index];
-        const metaData = queryResponse.metadatas[0][index] ?? {};
+      nodes: queryResponse.ids[0]!.map((id, index) => {
+        const text = (queryResponse.documents as string[][])[0]![index];
+        const metaData = queryResponse.metadatas[0]![index] ?? {};
         const node = metadataDictToNode(metaData);
         node.setContent(text);
         return node;
       }),
-      similarities: (queryResponse.distances as number[][])[0].map(
+      similarities: (queryResponse.distances as number[][])[0]!.map(
         (distance) => 1 - distance,
       ),
-      ids: queryResponse.ids[0],
+      ids: queryResponse.ids[0]!,
     };
     return vectorStoreQueryResult;
   }
diff --git a/packages/llamaindex/src/storage/vectorStore/MilvusVectorStore.ts b/packages/llamaindex/src/storage/vectorStore/MilvusVectorStore.ts
index 46f9cd51d48a6ea09766f4eef036d20854015a63..1a540d49fff93097a146f34a414e381c4f5f572f 100644
--- a/packages/llamaindex/src/storage/vectorStore/MilvusVectorStore.ts
+++ b/packages/llamaindex/src/storage/vectorStore/MilvusVectorStore.ts
@@ -1,12 +1,13 @@
-/* eslint-disable turbo/no-undeclared-env-vars */
 import type { ChannelOptions } from "@grpc/grpc-js";
 import { BaseNode, MetadataMode, type Metadata } from "@llamaindex/core/schema";
+import { getEnv } from "@llamaindex/env";
 import {
   DataType,
   MilvusClient,
   type ClientConfig,
   type DeleteReq,
   type RowData,
+  type SearchSimpleReq,
 } from "@zilliz/milvus2-sdk-node";
 import {
   VectorStoreBase,
@@ -110,10 +111,10 @@ export class MilvusVectorStore
       this.milvusClient = init.milvusClient;
     } else {
       const configOrAddress =
-        init?.params?.configOrAddress ?? process.env.MILVUS_ADDRESS;
-      const ssl = init?.params?.ssl ?? process.env.MILVUS_SSL === "true";
-      const username = init?.params?.username ?? process.env.MILVUS_USERNAME;
-      const password = init?.params?.password ?? process.env.MILVUS_PASSWORD;
+        init?.params?.configOrAddress ?? getEnv("MILVUS_ADDRESS");
+      const ssl = init?.params?.ssl ?? getEnv("MILVUS_SSL") === "true";
+      const username = init?.params?.username ?? getEnv("MILVUS_USERNAME");
+      const password = init?.params?.password ?? getEnv("MILVUS_PASSWORD");
 
       if (!configOrAddress) {
         throw new Error("Must specify MILVUS_ADDRESS via env variable.");
@@ -249,7 +250,7 @@ export class MilvusVectorStore
   ): Promise<VectorStoreQueryResult> {
     await this.ensureCollection();
 
-    const found = await this.milvusClient.search({
+    const found = await this.milvusClient.search(<SearchSimpleReq>{
       collection_name: this.collectionName,
       limit: query.similarityTopK,
       vector: query.queryEmbedding,
diff --git a/packages/llamaindex/src/storage/vectorStore/MongoDBAtlasVectorStore.ts b/packages/llamaindex/src/storage/vectorStore/MongoDBAtlasVectorStore.ts
index 66a4f7fa0b434c40ea4a92f30fd6468e382fd64d..b732a7b912a2cdcf34f7d4b26caa66ba9e8ae87b 100644
--- a/packages/llamaindex/src/storage/vectorStore/MongoDBAtlasVectorStore.ts
+++ b/packages/llamaindex/src/storage/vectorStore/MongoDBAtlasVectorStore.ts
@@ -49,7 +49,7 @@ function toMongoDBFilter(filters?: MetadataFilters): Record<string, any> {
   });
 
   if (filters.filters.length === 1) {
-    return createFilterObject(filters.filters[0]);
+    return createFilterObject(filters.filters[0]!);
   }
 
   if (filters.condition === FilterCondition.AND) {
@@ -123,7 +123,7 @@ export class MongoDBAtlasVectorSearch
   /**
    * Options to pass to the insertMany function when adding nodes.
    */
-  insertOptions?: BulkWriteOptions;
+  insertOptions?: BulkWriteOptions | undefined;
 
   /**
    * Function to determine the number of candidates to retrieve for a given query.
diff --git a/packages/llamaindex/src/storage/vectorStore/PGVectorStore.ts b/packages/llamaindex/src/storage/vectorStore/PGVectorStore.ts
index f4bfff31f387ee7b09c077e90da995f1bb6f3fad..ec4556aeca4e63260e93afb191de70865b4d34b7 100644
--- a/packages/llamaindex/src/storage/vectorStore/PGVectorStore.ts
+++ b/packages/llamaindex/src/storage/vectorStore/PGVectorStore.ts
@@ -173,7 +173,7 @@ export class PGVectorStore
   private getDataToInsert(embeddingResults: BaseNode<Metadata>[]) {
     const result = [];
     for (let index = 0; index < embeddingResults.length; index++) {
-      const row = embeddingResults[index];
+      const row = embeddingResults[index]!;
 
       const id: any = row.id_.length ? row.id_ : null;
       const meta = row.metadata || {};
diff --git a/packages/llamaindex/src/storage/vectorStore/QdrantVectorStore.ts b/packages/llamaindex/src/storage/vectorStore/QdrantVectorStore.ts
index b37352b6b118f58a516f94363f547fa88b7f8abe..20612481b2b5123b0452e87ecf6aa858ee206033 100644
--- a/packages/llamaindex/src/storage/vectorStore/QdrantVectorStore.ts
+++ b/packages/llamaindex/src/storage/vectorStore/QdrantVectorStore.ts
@@ -7,6 +7,7 @@ import {
   type VectorStoreQueryResult,
 } from "./types.js";
 
+import type { QdrantClientParams } from "@qdrant/js-client-rest";
 import { QdrantClient } from "@qdrant/js-client-rest";
 import { metadataDictToNode, nodeToMetadata } from "./utils.js";
 
@@ -74,7 +75,7 @@ export class QdrantVectorStore
     if (client) {
       this.db = client;
     } else {
-      this.db = new QdrantClient({
+      this.db = new QdrantClient(<QdrantClientParams>{
         url: url,
         apiKey: apiKey,
       });
@@ -150,7 +151,7 @@ export class QdrantVectorStore
       const payloads = [];
 
       for (let j = 0; j < this.batchSize && i < nodes.length; j++, i++) {
-        const node = nodes[i];
+        const node = nodes[i]!;
 
         nodeIds.push(node);
 
@@ -163,9 +164,9 @@ export class QdrantVectorStore
 
       for (let k = 0; k < nodeIds.length; k++) {
         const point: PointStruct = {
-          id: nodeIds[k].id_,
-          payload: payloads[k],
-          vector: vectors[k],
+          id: nodeIds[k]!.id_,
+          payload: payloads[k]!,
+          vector: vectors[k]!,
         };
 
         points.push(point);
@@ -188,7 +189,7 @@ export class QdrantVectorStore
   async add(embeddingResults: BaseNode[]): Promise<string[]> {
     if (embeddingResults.length > 0 && !this.collectionInitialized) {
       await this.initializeCollection(
-        embeddingResults[0].getEmbedding().length,
+        embeddingResults[0]!.getEmbedding().length,
       );
     }
 
@@ -242,7 +243,7 @@ export class QdrantVectorStore
     const ids = [];
 
     for (let i = 0; i < response.length; i++) {
-      const item = response[i];
+      const item = response[i]!;
       const payload = item.payload;
 
       const node = metadataDictToNode(payload);
@@ -321,7 +322,7 @@ export class QdrantVectorStore
     const metadataFilters = query.filters.filters;
 
     for (let i = 0; i < metadataFilters.length; i++) {
-      const filter = metadataFilters[i];
+      const filter = metadataFilters[i]!;
 
       if (typeof filter.key === "number") {
         mustConditions.push({
diff --git a/packages/llamaindex/src/storage/vectorStore/SimpleVectorStore.ts b/packages/llamaindex/src/storage/vectorStore/SimpleVectorStore.ts
index 7c9c380dc13e28961f88c910b02d6ae28e4c36e3..19765d84f5e6fceaf86e7ba030b2af74414b10ef 100644
--- a/packages/llamaindex/src/storage/vectorStore/SimpleVectorStore.ts
+++ b/packages/llamaindex/src/storage/vectorStore/SimpleVectorStore.ts
@@ -11,7 +11,6 @@ import {
   FilterOperator,
   VectorStoreBase,
   VectorStoreQueryMode,
-  type IEmbedModel,
   type MetadataFilter,
   type MetadataFilters,
   type VectorStoreNoEmbedModel,
@@ -132,7 +131,10 @@ export class SimpleVectorStore
   private data: SimpleVectorStoreData;
   private persistPath: string | undefined;
 
-  constructor(init?: { data?: SimpleVectorStoreData } & Partial<IEmbedModel>) {
+  constructor(init?: {
+    data?: SimpleVectorStoreData | undefined;
+    embedModel?: BaseEmbedding | undefined;
+  }) {
     super(init?.embedModel);
     this.data = init?.data || new SimpleVectorStoreData();
   }
@@ -150,7 +152,7 @@ export class SimpleVectorStore
   }
 
   async get(textId: string): Promise<number[]> {
-    return this.data.embeddingDict[textId];
+    return this.data.embeddingDict[textId]!;
   }
 
   async add(embeddingResults: BaseNode[]): Promise<string[]> {
diff --git a/packages/llamaindex/src/storage/vectorStore/WeaviateVectorStore.ts b/packages/llamaindex/src/storage/vectorStore/WeaviateVectorStore.ts
index 7daef1683dbbfb55823d33d7ab3bb17e40fe50d4..dc13cb614eb45697a78ab94fe9bd871022208a44 100644
--- a/packages/llamaindex/src/storage/vectorStore/WeaviateVectorStore.ts
+++ b/packages/llamaindex/src/storage/vectorStore/WeaviateVectorStore.ts
@@ -1,4 +1,3 @@
-/* eslint-disable turbo/no-undeclared-env-vars */
 import { BaseNode, MetadataMode, type Metadata } from "@llamaindex/core/schema";
 import weaviate, {
   Filters,
@@ -9,6 +8,8 @@ import weaviate, {
   type WeaviateNonGenericObject,
 } from "weaviate-client";
 
+import { getEnv } from "@llamaindex/env";
+import type { BaseHybridOptions } from "weaviate-client";
 import {
   VectorStoreBase,
   VectorStoreQueryMode,
@@ -113,7 +114,7 @@ const toWeaviateFilter = (
   const filtersList = standardFilters.filters.map((filter) =>
     buildFilterItem(collection, filter),
   );
-  if (filtersList.length === 1) return filtersList[0];
+  if (filtersList.length === 1) return filtersList[0]!;
   const condition = standardFilters.condition ?? "and";
   return Filters[condition](...filtersList);
 };
@@ -157,8 +158,8 @@ export class WeaviateVectorStore
     } else {
       // Load client cloud options from config or env
       const clusterURL =
-        init?.cloudOptions?.clusterURL ?? process.env.WEAVIATE_CLUSTER_URL;
-      const apiKey = init?.cloudOptions?.apiKey ?? process.env.WEAVIATE_API_KEY;
+        init?.cloudOptions?.clusterURL ?? getEnv("WEAVIATE_CLUSTER_URL");
+      const apiKey = init?.cloudOptions?.apiKey ?? getEnv("WEAVIATE_API_KEY");
       if (!clusterURL || !apiKey) {
         throw new Error(
           "Must specify WEAVIATE_CLUSTER_URL and WEAVIATE_API_KEY via env variable.",
@@ -219,7 +220,11 @@ export class WeaviateVectorStore
     );
   }
 
-  public async query(query: VectorStoreQuery): Promise<VectorStoreQueryResult> {
+  public async query(
+    query: VectorStoreQuery & {
+      queryStr: string;
+    },
+  ): Promise<VectorStoreQueryResult> {
     const collection = await this.ensureCollection();
     const allProperties = await this.getAllProperties();
 
@@ -235,15 +240,29 @@ export class WeaviateVectorStore
       filters = toWeaviateFilter(collection, query.filters);
     }
 
-    const queryResult = await collection.query.hybrid(query.queryStr!, {
-      vector: query.queryEmbedding,
-      alpha: this.getQueryAlpha(query),
-      limit: query.similarityTopK,
+    const hybridOptions: BaseHybridOptions<undefined> = {
       returnMetadata: Object.values(SIMILARITY_KEYS),
       returnProperties: allProperties,
       includeVector: true,
-      filters,
-    });
+    };
+    const alpha = this.getQueryAlpha(query);
+    if (query.queryEmbedding) {
+      hybridOptions.vector = query.queryEmbedding;
+    }
+    if (query.similarityTopK) {
+      hybridOptions.limit = query.similarityTopK;
+    }
+    if (alpha) {
+      hybridOptions.alpha = alpha;
+    }
+    if (filters) {
+      hybridOptions.filters = filters;
+    }
+
+    const queryResult = await collection.query.hybrid(
+      query.queryStr,
+      hybridOptions,
+    );
 
     const entries = queryResult.objects;
 
@@ -320,7 +339,7 @@ export class WeaviateVectorStore
   }
 
   private checkIndexName(indexName?: string) {
-    if (indexName && indexName[0] !== indexName[0].toUpperCase()) {
+    if (indexName && indexName[0] !== indexName[0]!.toUpperCase()) {
       throw new Error(
         "Index name must start with a capital letter, e.g. 'LlamaIndex'",
       );
diff --git a/packages/llamaindex/src/storage/vectorStore/types.ts b/packages/llamaindex/src/storage/vectorStore/types.ts
index 475ab67d077b31c3b1162989bccdd8664a4049f3..7c726567b03bc575ba2206bd7c577f473eae74a3 100644
--- a/packages/llamaindex/src/storage/vectorStore/types.ts
+++ b/packages/llamaindex/src/storage/vectorStore/types.ts
@@ -72,7 +72,7 @@ export interface VectorStoreQuery {
   queryStr?: string;
   mode: VectorStoreQueryMode;
   alpha?: number;
-  filters?: MetadataFilters;
+  filters?: MetadataFilters | undefined;
   mmrThreshold?: number;
 }
 
diff --git a/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts b/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts
index 599f9eee8dd93dc634bad3e257cfa94eb7a9ac46..0d38964364f39860269241d2795136fa3b0c7e60 100644
--- a/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts
+++ b/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts
@@ -15,7 +15,7 @@ export class MultiModalResponseSynthesizer
   extends PromptMixin
   implements BaseSynthesizer
 {
-  serviceContext?: ServiceContext;
+  serviceContext?: ServiceContext | undefined;
   metadataMode: MetadataMode;
   textQATemplate: TextQAPrompt;
 
diff --git a/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts b/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts
index 78dee16961c0d1abb4cefb5d2c45e6b79886481d..d7d68be607026d6a9542bb6ac05b8503a357860b 100644
--- a/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts
+++ b/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts
@@ -24,9 +24,9 @@ export class ResponseSynthesizer
     serviceContext,
     metadataMode = MetadataMode.NONE,
   }: {
-    responseBuilder?: ResponseBuilder;
-    serviceContext?: ServiceContext;
-    metadataMode?: MetadataMode;
+    responseBuilder?: ResponseBuilder | undefined;
+    serviceContext?: ServiceContext | undefined;
+    metadataMode?: MetadataMode | undefined;
   } = {}) {
     super();
 
diff --git a/packages/llamaindex/src/synthesizers/builders.ts b/packages/llamaindex/src/synthesizers/builders.ts
index b8709bd28002114e94e667937e532b6e75b55bf8..65c9ac5b2d623b831c25fc16a86654b919570754 100644
--- a/packages/llamaindex/src/synthesizers/builders.ts
+++ b/packages/llamaindex/src/synthesizers/builders.ts
@@ -74,10 +74,10 @@ export class SimpleResponseBuilder
       context: textChunks.join("\n\n"),
     });
     if (stream) {
-      const response = await this.llm.complete({ prompt, stream });
+      const response = await this.llm.complete({ prompt, stream: true });
       return streamConverter(response, (chunk) => chunk.text);
     } else {
-      const response = await this.llm.complete({ prompt, stream });
+      const response = await this.llm.complete({ prompt, stream: false });
       return response.text;
     }
   }
@@ -144,7 +144,7 @@ export class Refine extends PromptMixin implements ResponseBuilder {
     let response: AsyncIterable<string> | string | undefined = prevResponse;
 
     for (let i = 0; i < textChunks.length; i++) {
-      const chunk = textChunks[i];
+      const chunk = textChunks[i]!;
       const lastChunk = i === textChunks.length - 1;
       if (!response) {
         response = await this.giveResponseSingle(
@@ -178,7 +178,7 @@ export class Refine extends PromptMixin implements ResponseBuilder {
     let response: AsyncIterable<string> | string | undefined = undefined;
 
     for (let i = 0; i < textChunks.length; i++) {
-      const chunk = textChunks[i];
+      const chunk = textChunks[i]!;
       const lastChunk = i === textChunks.length - 1;
       if (!response) {
         response = await this.complete({
@@ -216,7 +216,7 @@ export class Refine extends PromptMixin implements ResponseBuilder {
     let response: AsyncIterable<string> | string = initialReponse;
 
     for (let i = 0; i < textChunks.length; i++) {
-      const chunk = textChunks[i];
+      const chunk = textChunks[i]!;
       const lastChunk = i === textChunks.length - 1;
       response = await this.complete({
         prompt: refineTemplate.format({
@@ -341,7 +341,7 @@ export class TreeSummarize extends PromptMixin implements ResponseBuilder {
     if (packedTextChunks.length === 1) {
       const params = {
         prompt: this.summaryTemplate.format({
-          context: packedTextChunks[0],
+          context: packedTextChunks[0]!,
           query: extractText(query),
         }),
       };
diff --git a/packages/llamaindex/src/synthesizers/types.ts b/packages/llamaindex/src/synthesizers/types.ts
index a31e65cf67293d308e37af3f39450b4ae5c0162e..8c28ae332530e62e27cfb8993f4c82c27018bd69 100644
--- a/packages/llamaindex/src/synthesizers/types.ts
+++ b/packages/llamaindex/src/synthesizers/types.ts
@@ -22,7 +22,7 @@ export interface BaseSynthesizer extends PromptMixin {
 export interface ResponseBuilderQuery {
   query: QueryType;
   textChunks: string[];
-  prevResponse?: string;
+  prevResponse?: string | undefined;
 }
 
 /**
diff --git a/packages/llamaindex/tests/MetadataExtractors.test.ts b/packages/llamaindex/tests/MetadataExtractors.test.ts
index 76eb487cae11dc3009c712cf1d1d826ac153100c..fe0e76c95607cddef1cd9c22c64abcd12946baf5 100644
--- a/packages/llamaindex/tests/MetadataExtractors.test.ts
+++ b/packages/llamaindex/tests/MetadataExtractors.test.ts
@@ -58,7 +58,7 @@ describe("[MetadataExtractor]: Extractors should populate the metadata", () => {
 
     const nodesWithKeywordMetadata = await keywordExtractor.processNodes(nodes);
 
-    expect(nodesWithKeywordMetadata[0].metadata).toMatchObject({
+    expect(nodesWithKeywordMetadata[0]!.metadata).toMatchObject({
       excerptKeywords: DEFAULT_LLM_TEXT_OUTPUT,
     });
   });
@@ -77,7 +77,7 @@ describe("[MetadataExtractor]: Extractors should populate the metadata", () => {
 
     const nodesWithKeywordMetadata = await titleExtractor.processNodes(nodes);
 
-    expect(nodesWithKeywordMetadata[0].metadata).toMatchObject({
+    expect(nodesWithKeywordMetadata[0]!.metadata).toMatchObject({
       documentTitle: DEFAULT_LLM_TEXT_OUTPUT,
     });
   });
@@ -97,7 +97,7 @@ describe("[MetadataExtractor]: Extractors should populate the metadata", () => {
     const nodesWithKeywordMetadata =
       await questionsAnsweredExtractor.processNodes(nodes);
 
-    expect(nodesWithKeywordMetadata[0].metadata).toMatchObject({
+    expect(nodesWithKeywordMetadata[0]!.metadata).toMatchObject({
       questionsThisExcerptCanAnswer: DEFAULT_LLM_TEXT_OUTPUT,
     });
   });
@@ -115,7 +115,7 @@ describe("[MetadataExtractor]: Extractors should populate the metadata", () => {
 
     const nodesWithKeywordMetadata = await summaryExtractor.processNodes(nodes);
 
-    expect(nodesWithKeywordMetadata[0].metadata).toMatchObject({
+    expect(nodesWithKeywordMetadata[0]!.metadata).toMatchObject({
       sectionSummary: DEFAULT_LLM_TEXT_OUTPUT,
     });
   });
diff --git a/packages/llamaindex/tests/Selectors.test.ts b/packages/llamaindex/tests/Selectors.test.ts
index 8bf9ed18d0be8b5775ae974b0d231ce98dd40b44..4d4e804209d8b6b37df483365685192d4337ae7a 100644
--- a/packages/llamaindex/tests/Selectors.test.ts
+++ b/packages/llamaindex/tests/Selectors.test.ts
@@ -27,6 +27,6 @@ describe("LLMSelector", () => {
       "what is the best fruit?",
     );
 
-    expect(result.selections[0].reason).toBe("apple");
+    expect(result.selections[0]!.reason).toBe("apple");
   });
 });
diff --git a/packages/llamaindex/tests/indices/VectorStoreIndex.test.ts b/packages/llamaindex/tests/indices/VectorStoreIndex.test.ts
index 82acb55c52e9ed59118025a6b3607e27efdc2287..9bca51af7e9dfaae84789d8bee9618072bfec600 100644
--- a/packages/llamaindex/tests/indices/VectorStoreIndex.test.ts
+++ b/packages/llamaindex/tests/indices/VectorStoreIndex.test.ts
@@ -43,7 +43,7 @@ describe("VectorStoreIndex", () => {
 
   test("fromDocuments stores duplicates without a doc store strategy", async () => {
     const entries = await testStrategy(DocStoreStrategy.NONE);
-    expect(entries[0] + 1).toBe(entries[1]);
+    expect(entries[0]! + 1).toBe(entries[1]);
   });
 
   test("fromDocuments ignores duplicates with upserts doc store strategy", async () => {
diff --git a/packages/llamaindex/tests/indices/json-to-index-struct.test.ts b/packages/llamaindex/tests/indices/json-to-index-struct.test.ts
index 62e944c098df14597f8bcc57db4e258c7e09a771..6a74d5b1767696acd842695bd56ccf7cf35039f1 100644
--- a/packages/llamaindex/tests/indices/json-to-index-struct.test.ts
+++ b/packages/llamaindex/tests/indices/json-to-index-struct.test.ts
@@ -23,7 +23,7 @@ describe("jsonToIndexStruct", () => {
 
     expect(isIndexDict(actual)).toBe(true);
     expect(
-      (actual as IndexDict).nodesDict.nodeId.getContent(MetadataMode.NONE),
+      (actual as IndexDict).nodesDict.nodeId!.getContent(MetadataMode.NONE),
     ).toEqual("text");
   });
   it("transforms json to IndexList", () => {
diff --git a/packages/llamaindex/tests/postprocessors/MetadataReplacementPostProcessor.test.ts b/packages/llamaindex/tests/postprocessors/MetadataReplacementPostProcessor.test.ts
index 52c21c64ecdf1fd5a7c576c13366098fb864ff9f..e7afa0e3ad82fccf73269a5ad272c67d65d83580 100644
--- a/packages/llamaindex/tests/postprocessors/MetadataReplacementPostProcessor.test.ts
+++ b/packages/llamaindex/tests/postprocessors/MetadataReplacementPostProcessor.test.ts
@@ -21,15 +21,15 @@ describe("MetadataReplacementPostProcessor", () => {
   });
 
   test("Replaces the content of each node with specified metadata key if it exists", async () => {
-    nodes[0].node.metadata = { targetKey: "NewContent" };
+    nodes[0]!.node.metadata = { targetKey: "NewContent" };
     const newNodes = await postProcessor.postprocessNodes(nodes);
     // Check if node content was replaced correctly
-    expect(newNodes[0].node.getContent(MetadataMode.NONE)).toBe("NewContent");
+    expect(newNodes[0]!.node.getContent(MetadataMode.NONE)).toBe("NewContent");
   });
 
   test("Retains the original content of each node if no metadata key is found", async () => {
     const newNodes = await postProcessor.postprocessNodes(nodes);
     // Check if node content remained unchanged
-    expect(newNodes[0].node.getContent(MetadataMode.NONE)).toBe("OldContent");
+    expect(newNodes[0]!.node.getContent(MetadataMode.NONE)).toBe("OldContent");
   });
 });
diff --git a/packages/llamaindex/tests/readers/JSONReader.test.ts b/packages/llamaindex/tests/readers/JSONReader.test.ts
index 36e537bff04a04b2697ed480425e662053499efc..267f4b492e19aa64e43a267fb59f6d3603c34b0d 100644
--- a/packages/llamaindex/tests/readers/JSONReader.test.ts
+++ b/packages/llamaindex/tests/readers/JSONReader.test.ts
@@ -34,7 +34,7 @@ describe("JSONReader", () => {
     it("should load and parse valid JSON content", async () => {
       const docs = await reader.loadDataAsContent(content);
       expect(docs).toHaveLength(1);
-      expect(docs[0].text).toContain('"key1": "value1"');
+      expect(docs[0]!.text).toContain('"key1": "value1"');
     });
 
     it("should throw JSONParseError for invalid JSON content", async () => {
@@ -53,8 +53,8 @@ describe("JSONReader", () => {
       );
       const docs = await reader.loadDataAsContent(content);
       expect(docs).toHaveLength(2);
-      expect(docs[0].text).toBe('"key1": "value1"');
-      expect(docs[1].text).toBe('"key2": "value2"');
+      expect(docs[0]!.text).toBe('"key1": "value1"');
+      expect(docs[1]!.text).toBe('"key2": "value2"');
     });
 
     it("should skip empty lines in JSON Lines format", async () => {
@@ -64,8 +64,8 @@ describe("JSONReader", () => {
       );
       const docs = await reader.loadDataAsContent(content);
       expect(docs).toHaveLength(2);
-      expect(docs[0].text).toBe('"key1": "value1"');
-      expect(docs[1].text).toBe('"key2": "value2"');
+      expect(docs[0]!.text).toBe('"key1": "value1"');
+      expect(docs[1]!.text).toBe('"key2": "value2"');
     });
   });
 
@@ -74,14 +74,14 @@ describe("JSONReader", () => {
       reader = new JSONReader({ ensureAscii: true });
       const content = new TextEncoder().encode('{"key": "valüe"}');
       const docs = await reader.loadDataAsContent(content);
-      expect(docs[0].text).toBe('"key": "val\\u00fce"');
+      expect(docs[0]!.text).toBe('"key": "val\\u00fce"');
     });
 
     it("should not alter ASCII characters", async () => {
       reader = new JSONReader({ ensureAscii: true });
       const content = new TextEncoder().encode('{"key": "value"}');
       const docs = await reader.loadDataAsContent(content);
-      expect(docs[0].text).toBe('"key": "value"');
+      expect(docs[0]!.text).toBe('"key": "value"');
     });
   });
 
@@ -89,26 +89,26 @@ describe("JSONReader", () => {
     it("should create document with levelsBack option", async () => {
       reader = new JSONReader({ levelsBack: 1 });
       const docs = await reader.loadDataAsContent(content);
-      expect(docs[0].text).toContain("key1 value1");
-      expect(docs[0].text).toContain("c d");
+      expect(docs[0]!.text).toContain("key1 value1");
+      expect(docs[0]!.text).toContain("c d");
     });
 
     it("should traverse all levels with levelsBack 0", async () => {
       reader = new JSONReader({ levelsBack: 0 });
       const docs = await reader.loadDataAsContent(content);
-      expect(docs[0].text).toContain("a 1 key1 value1");
-      expect(docs[0].text).toContain("a 2 key2 value2");
-      expect(docs[0].text).toContain("b c d");
+      expect(docs[0]!.text).toContain("a 1 key1 value1");
+      expect(docs[0]!.text).toContain("a 2 key2 value2");
+      expect(docs[0]!.text).toContain("b c d");
     });
   });
   describe("collapseLength option", () => {
     it("should collapse values based on collapseLength", async () => {
       reader = new JSONReader({ collapseLength: 10, levelsBack: 0 });
       const docs = await reader.loadDataAsContent(content);
-      expect(docs[0].text).toContain('a 1 key1 "value1"');
-      expect(docs[0].text).toContain('b {"c":"d"}');
-      expect(docs[0].metadata.traversal_data.collapse_length).toBe(10);
-      expect(docs[0].metadata.traversal_data.levels_back).toBe(0);
+      expect(docs[0]!.text).toContain('a 1 key1 "value1"');
+      expect(docs[0]!.text).toContain('b {"c":"d"}');
+      expect(docs[0]!.metadata.traversal_data.collapse_length).toBe(10);
+      expect(docs[0]!.metadata.traversal_data.levels_back).toBe(0);
     });
   });
 
@@ -116,14 +116,14 @@ describe("JSONReader", () => {
     it("should remove JSON structural characters", async () => {
       reader = new JSONReader({ cleanJson: true });
       const docs = await reader.loadDataAsContent(content);
-      expect(docs[0].text).toContain('"key1": "value1"');
-      expect(docs[0].text).toContain('"a": {');
+      expect(docs[0]!.text).toContain('"key1": "value1"');
+      expect(docs[0]!.text).toContain('"a": {');
     });
 
     it("should not remove JSON structural characters, but white spaces", async () => {
       reader = new JSONReader({ cleanJson: false });
       const docs = await reader.loadDataAsContent(content);
-      expect(docs[0].text).toBe(
+      expect(docs[0]!.text).toBe(
         '{"a":{"1":{"key1":"value1"},"2":{"key2":"value2"}},"b":{"c":"d"}}',
       );
     });
diff --git a/packages/llamaindex/tests/readers/pdf-reader.test.ts b/packages/llamaindex/tests/readers/pdf-reader.test.ts
index 6945167c2de785a143bc16551c84af09b433f9c8..7d16d2fc9b3142d3900d9a2024c41f8f3b56dfc3 100644
--- a/packages/llamaindex/tests/readers/pdf-reader.test.ts
+++ b/packages/llamaindex/tests/readers/pdf-reader.test.ts
@@ -6,13 +6,13 @@ describe("pdf reader", () => {
   test("basic.pdf", async () => {
     const documents = await reader.loadData("../../../examples/data/basic.pdf");
     expect(documents.length).toBe(1);
-    expect(documents[0].metadata).toMatchObject({
+    expect(documents[0]!.metadata).toMatchObject({
       file_path: expect.any(String),
       file_name: "basic.pdf",
       page_number: 1,
       total_pages: 1,
     });
-    await expect(documents[0].text).toMatchFileSnapshot(
+    await expect(documents[0]!.text).toMatchFileSnapshot(
       "./.snap/basic.pdf.snap",
     );
   });
diff --git a/packages/llamaindex/tests/type.test.ts b/packages/llamaindex/tests/type.test.ts
index 3612f0c00d7cd8ed6ff693024cf8c18d934ef582..ba6cb1a9d32a95dfb4fe08f9a6a0fa9f2f166007 100644
--- a/packages/llamaindex/tests/type.test.ts
+++ b/packages/llamaindex/tests/type.test.ts
@@ -11,7 +11,7 @@ test("chat message type", () => {
   expectTypeOf<ChatMessage>().toMatchTypeOf<{
     content: MessageContent;
     role: MessageType;
-    options?: object;
+    options?: object | undefined;
   }>();
   expectTypeOf<ChatMessage>().not.toMatchTypeOf<{
     content: MessageContent;
@@ -25,7 +25,7 @@ test("chat message type", () => {
   expectTypeOf<ChatMessage<Options>>().toMatchTypeOf<{
     content: MessageContent;
     role: MessageType;
-    options?: Options;
+    options?: Options | undefined;
   }>();
 });
 
diff --git a/packages/llamaindex/tests/vectorStores/MilvusVectorStore.test.ts b/packages/llamaindex/tests/vectorStores/MilvusVectorStore.test.ts
index 7c2c5e50a9f990b414fb0660055d8d269f03b9bb..4de352313d90a5fdd043acaea8bd9fa371f6d373 100644
--- a/packages/llamaindex/tests/vectorStores/MilvusVectorStore.test.ts
+++ b/packages/llamaindex/tests/vectorStores/MilvusVectorStore.test.ts
@@ -325,7 +325,7 @@ describe("MilvusVectorStore", () => {
 
     testcases.forEach((tc) => {
       it(`[Unsupported Operator] [${tc.title}] should throw error`, async () => {
-        const errorMsg = `Operator ${tc.filters?.filters[0].operator} is not supported.`;
+        const errorMsg = `Operator ${tc.filters?.filters[0]!.operator} is not supported.`;
         expect(() => store.toMilvusFilter(tc.filters)).toThrow(errorMsg);
       });
     });
diff --git a/packages/llamaindex/tests/vectorStores/QdrantVectorStore.test.ts b/packages/llamaindex/tests/vectorStores/QdrantVectorStore.test.ts
index e364c6893dd607f341cac954e39a22137329e9ee..d7d4e9a865f37b6b5e918cc388630269587df70d 100644
--- a/packages/llamaindex/tests/vectorStores/QdrantVectorStore.test.ts
+++ b/packages/llamaindex/tests/vectorStores/QdrantVectorStore.test.ts
@@ -70,7 +70,7 @@ describe("QdrantVectorStore", () => {
         const ids = await store.add(nodes);
 
         expect(mockInitializeCollection).toHaveBeenCalledWith(
-          nodes[0].getEmbedding().length,
+          nodes[0]!.getEmbedding().length,
         );
         expect(mockBuildPoints).toHaveBeenCalledWith(nodes);
         expect(mockQdrantClient.upsert).toHaveBeenCalled();
diff --git a/packages/wasm-tools/src/factory.ts b/packages/wasm-tools/src/factory.ts
index 062a0292115b3e4ebc4b15cd669560045237652f..c033b8e0497e00fdf5e3c7de9579c97c738d6806 100644
--- a/packages/wasm-tools/src/factory.ts
+++ b/packages/wasm-tools/src/factory.ts
@@ -108,8 +108,8 @@ export default class ToolFactory {
             mode: "no-cors",
             method: "GET",
           })
-            .then((fetched) => {
-              fetched.json().then((data) => {
+            .then(async (fetched) => {
+              await fetched.json().then((data) => {
                 console.log("Response from API call: ", data);
                 // Add callback to handle data if needed
                 return wasmInstance.exports.__newString(JSON.stringify(data));
@@ -133,7 +133,7 @@ export default class ToolFactory {
 
   private static configsToToolClass = (toolConfigs: BaseTool) => {
     return class implements BaseTool {
-      call = toolConfigs.call;
+      call = toolConfigs.call!;
       metadata: ToolMetadata;
       constructor(metadata: ToolMetadata) {
         this.metadata = metadata || toolConfigs.metadata;
diff --git a/packages/wasm-tools/src/utils/object.ts b/packages/wasm-tools/src/utils/object.ts
index 6e305d2a09766cbe46bc34d0bb789c5ff2ff039f..cee501e345308664667c4678785417290d668f1e 100644
--- a/packages/wasm-tools/src/utils/object.ts
+++ b/packages/wasm-tools/src/utils/object.ts
@@ -4,7 +4,7 @@ export const transformObject = (
 ) => {
   const newObj: Record<string, any> = {};
   for (const key in transfomer) {
-    newObj[key] = transfomer[key](obj[key]);
+    newObj[key] = transfomer[key]!(obj[key]);
   }
   return newObj;
 };
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index fe8df6f2d5c5f59e5b0c1d8f15e0cfd87f84678e..261c3e9876611c433d831a84d363faf889e2157e 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -336,9 +336,12 @@ importers:
 
   packages/cloud:
     devDependencies:
+      '@hey-api/client-fetch':
+        specifier: ^0.2.4
+        version: 0.2.4
       '@hey-api/openapi-ts':
-        specifier: ^0.52.11
-        version: 0.52.11(typescript@5.5.4)
+        specifier: ^0.53.0
+        version: 0.53.0(typescript@5.5.4)
       bunchee:
         specifier: 5.3.2
         version: 5.3.2(typescript@5.5.4)
@@ -2734,8 +2737,11 @@ packages:
   '@hapi/topo@5.1.0':
     resolution: {integrity: sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==}
 
-  '@hey-api/openapi-ts@0.52.11':
-    resolution: {integrity: sha512-S3NrCQDxy7AtW5sx8OVoBaqpaYNqYsD0y6YNwhUXPUahbrW7Wxm/N4RIEsRtXVbcjUqdAjo1FmFmeyEKYziJkw==}
+  '@hey-api/client-fetch@0.2.4':
+    resolution: {integrity: sha512-SGTVAVw3PlKDLw+IyhNhb/jCH3P1P2xJzLxA8Kyz1g95HrkYOJdRpl9F5I7LLwo9aCIB7nwR2NrSeX7QaQD7vQ==}
+
+  '@hey-api/openapi-ts@0.53.0':
+    resolution: {integrity: sha512-5pDd/s0yHJniruYyKYmEsAMbY10Nh/EwhHlgIrdpQ1KZWQdyTbH/tn8rVHT5Mopr1dMuYX0kq0TzpjcNlvrROQ==}
     engines: {node: ^18.0.0 || >=20.0.0}
     hasBin: true
     peerDependencies:
@@ -14065,7 +14071,9 @@ snapshots:
     dependencies:
       '@hapi/hoek': 9.3.0
 
-  '@hey-api/openapi-ts@0.52.11(typescript@5.5.4)':
+  '@hey-api/client-fetch@0.2.4': {}
+
+  '@hey-api/openapi-ts@0.53.0(typescript@5.5.4)':
     dependencies:
       '@apidevtools/json-schema-ref-parser': 11.7.0
       c12: 1.11.1
diff --git a/tsconfig.json b/tsconfig.json
index 51be85d882532655131e015ec2ebbd8ce82e79a4..3a11d68c083c49212e2dd620109e5ac9f4e8b0a6 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -6,6 +6,9 @@
     "verbatimModuleSyntax": true,
     "esModuleInterop": true,
     "forceConsistentCasingInFileNames": true,
+    "noUncheckedIndexedAccess": true,
+    "strictNullChecks": true,
+    "exactOptionalPropertyTypes": true,
     "strict": true,
     "skipLibCheck": true,
     "outDir": "./lib",