From 321c39ddc7f71da6f2e3c7f4a687ca20d9611b07 Mon Sep 17 00:00:00 2001
From: Alex Yang <himself65@outlook.com>
Date: Thu, 27 Jun 2024 09:58:00 -0700
Subject: [PATCH] fix: generate api as class (#988)

---
 .changeset/honest-monkeys-cross.md            |  6 +++
 packages/cloud/README.md                      |  2 +-
 packages/cloud/openapi-ts.config.ts           |  3 ++
 packages/cloud/src/api.ts                     |  3 --
 .../llamaindex/src/cloud/LlamaCloudIndex.ts   | 45 ++++++++++---------
 .../src/cloud/LlamaCloudRetriever.ts          | 18 ++++----
 packages/llamaindex/src/cloud/utils.ts        | 13 ++----
 7 files changed, 46 insertions(+), 44 deletions(-)
 create mode 100644 .changeset/honest-monkeys-cross.md

diff --git a/.changeset/honest-monkeys-cross.md b/.changeset/honest-monkeys-cross.md
new file mode 100644
index 000000000..72420b978
--- /dev/null
+++ b/.changeset/honest-monkeys-cross.md
@@ -0,0 +1,6 @@
+---
+"@llamaindex/cloud": patch
+"llamaindex": patch
+---
+
+fix: generate api as class
diff --git a/packages/cloud/README.md b/packages/cloud/README.md
index 08aabdf30..7564d831f 100644
--- a/packages/cloud/README.md
+++ b/packages/cloud/README.md
@@ -5,7 +5,7 @@
 ## Usage
 
 ```ts
-import { OpenAPI, Service } from "@llamaindex/cloud/api";
+import { OpenAPI } from "@llamaindex/cloud/api";
 OpenAPI.TOKEN = "YOUR_API_KEY";
 OpenAPI.BASE = "https://api.cloud.llamaindex.ai/";
 // ...
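
With this change the generated API is exposed as classes of static methods (for example `PipelinesService`, `ProjectsService`) instead of the aggregated `Service` namespace. A minimal sketch of the updated usage, assuming a valid API key; the project and pipeline names are hypothetical, while the method name is taken from the generated `PipelinesService` used later in this patch:

```ts
import { OpenAPI, PipelinesService } from "@llamaindex/cloud/api";

OpenAPI.TOKEN = "YOUR_API_KEY";
OpenAPI.BASE = "https://api.cloud.llamaindex.ai/";

// Look up pipelines by project and pipeline name (hypothetical values).
const pipelines = await PipelinesService.searchPipelinesApiV1PipelinesGet({
  projectName: "default",
  pipelineName: "my-pipeline",
});
console.log(pipelines.length);
```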
diff --git a/packages/cloud/openapi-ts.config.ts b/packages/cloud/openapi-ts.config.ts
index 39d60bac0..97dc9e0c2 100644
--- a/packages/cloud/openapi-ts.config.ts
+++ b/packages/cloud/openapi-ts.config.ts
@@ -9,6 +9,9 @@ export default defineConfig({
     format: "prettier",
     lint: "eslint",
   },
+  services: {
+    asClass: true,
+  },
   types: {
     enums: "javascript",
   },
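
The `services: { asClass: true }` option changes the shape of the client that `openapi-ts` generates: instead of a flat `services.gen` module re-exported as a single `Service` namespace, each OpenAPI tag becomes a class whose operations are static methods. A rough sketch of the call-site difference, using a method name that appears in this patch and a hypothetical pipeline id:

```ts
import { PipelinesService } from "@llamaindex/cloud/api";

// Before this change, operations lived on one aggregated namespace:
//   await Service.getPipelineStatusApiV1PipelinesPipelineIdStatusGet({ pipelineId });
// With asClass, each tag is a class of static methods:
const pipelineId = "..."; // hypothetical id
const pipelineStatus =
  await PipelinesService.getPipelineStatusApiV1PipelinesPipelineIdStatusGet({
    pipelineId,
  });
console.log(pipelineStatus.status); // e.g. "SUCCESS"
```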
diff --git a/packages/cloud/src/api.ts b/packages/cloud/src/api.ts
index 19dc0c8b1..5ec76921e 100644
--- a/packages/cloud/src/api.ts
+++ b/packages/cloud/src/api.ts
@@ -1,4 +1 @@
-import * as Service from "./client/services.gen";
-
 export * from "./client";
-export { Service };
diff --git a/packages/llamaindex/src/cloud/LlamaCloudIndex.ts b/packages/llamaindex/src/cloud/LlamaCloudIndex.ts
index a4efce454..65c0412de 100644
--- a/packages/llamaindex/src/cloud/LlamaCloudIndex.ts
+++ b/packages/llamaindex/src/cloud/LlamaCloudIndex.ts
@@ -11,7 +11,7 @@ import { getPipelineCreate } from "./config.js";
 import type { CloudConstructorParams } from "./constants.js";
 import { getAppBaseUrl, initService } from "./utils.js";
 
-import { OpenAPI, Service } from "@llamaindex/cloud/api";
+import { PipelinesService, ProjectsService } from "@llamaindex/cloud/api";
 import { getEnv } from "@llamaindex/env";
 import { Settings } from "../Settings.js";
 import { OpenAIEmbedding } from "../embeddings/OpenAIEmbedding.js";
@@ -40,9 +40,11 @@ export class LlamaCloudIndex {
 
     while (true) {
       const pipelineStatus =
-        await Service.getPipelineStatusApiV1PipelinesPipelineIdStatusGet({
-          pipelineId,
-        });
+        await PipelinesService.getPipelineStatusApiV1PipelinesPipelineIdStatusGet(
+          {
+            pipelineId,
+          },
+        );
 
       if (pipelineStatus.status === "SUCCESS") {
         if (verbose) {
@@ -90,7 +92,7 @@ export class LlamaCloudIndex {
 
       for (const doc of pendingDocs) {
         const { status } =
-          await Service.getPipelineDocumentStatusApiV1PipelinesPipelineIdDocumentsDocumentIdStatusGet(
+          await PipelinesService.getPipelineDocumentStatusApiV1PipelinesPipelineIdDocumentsDocumentIdStatusGet(
             { pipelineId, documentId: doc },
           );
 
@@ -135,7 +137,7 @@ export class LlamaCloudIndex {
     name: string,
     projectName: string,
   ): Promise<string> {
-    const pipelines = await Service.searchPipelinesApiV1PipelinesGet({
+    const pipelines = await PipelinesService.searchPipelinesApiV1PipelinesGet({
       projectName,
       pipelineName: name,
     });
@@ -156,6 +158,7 @@ export class LlamaCloudIndex {
         apiKey: getEnv("OPENAI_API_KEY"),
       }),
     ];
+    const apiUrl = getAppBaseUrl();
 
     const pipelineCreateParams = await getPipelineCreate({
       pipelineName: params.name,
@@ -164,7 +167,7 @@ export class LlamaCloudIndex {
       transformations: params.transformations ?? defaultTransformations,
     });
 
-    const project = await Service.upsertProjectApiV1ProjectsPut({
+    const project = await ProjectsService.upsertProjectApiV1ProjectsPut({
       requestBody: {
         name: params.projectName ?? "default",
       },
@@ -174,7 +177,7 @@ export class LlamaCloudIndex {
       throw new Error("Project ID should be defined");
     }
 
-    const pipeline = await Service.upsertPipelineApiV1PipelinesPut({
+    const pipeline = await PipelinesService.upsertPipelineApiV1PipelinesPut({
       projectId: project.id,
       requestBody: {
         name: params.name,
@@ -192,7 +195,7 @@ export class LlamaCloudIndex {
       console.log(`Created pipeline ${pipeline.id} with name ${params.name}`);
     }
 
-    await Service.upsertBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPut(
+    await PipelinesService.upsertBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPut(
       {
         pipelineId: pipeline.id,
         requestBody: params.documents.map((doc) => ({
@@ -207,9 +210,11 @@ export class LlamaCloudIndex {
 
     while (true) {
       const pipelineStatus =
-        await Service.getPipelineStatusApiV1PipelinesPipelineIdStatusGet({
-          pipelineId: pipeline.id,
-        });
+        await PipelinesService.getPipelineStatusApiV1PipelinesPipelineIdStatusGet(
+          {
+            pipelineId: pipeline.id,
+          },
+        );
 
       if (pipelineStatus.status === "SUCCESS") {
         console.info(
@@ -220,14 +225,14 @@ export class LlamaCloudIndex {
 
       if (pipelineStatus.status === "ERROR") {
         console.error(
-          `Some documents failed to ingest, check your pipeline logs at ${OpenAPI.BASE}/project/${project.id}/deploy/${pipeline.id}`,
+          `Some documents failed to ingest, check your pipeline logs at ${apiUrl}/project/${project.id}/deploy/${pipeline.id}`,
         );
         throw new Error("Some documents failed to ingest");
       }
 
       if (pipelineStatus.status === "PARTIAL_SUCCESS") {
         console.info(
-          `Documents ingestion partially succeeded, to check a more complete status check your pipeline at ${OpenAPI.BASE}/project/${project.id}/deploy/${pipeline.id}`,
+          `Documents ingestion partially succeeded, to check a more complete status check your pipeline at ${apiUrl}/project/${project.id}/deploy/${pipeline.id}`,
         );
         break;
       }
@@ -241,7 +246,7 @@ export class LlamaCloudIndex {
 
     if (params.verbose) {
       console.info(
-        `Ingestion completed, find your index at ${OpenAPI.BASE}/project/${project.id}/deploy/${pipeline.id}`,
+        `Ingestion completed, find your index at ${apiUrl}/project/${project.id}/deploy/${pipeline.id}`,
       );
     }
 
@@ -281,7 +286,7 @@ export class LlamaCloudIndex {
       throw new Error("We couldn't find the pipeline ID for the given name");
     }
 
-    await Service.createBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPost(
+    await PipelinesService.createBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPost(
       {
         pipelineId: pipelineId,
         requestBody: [
@@ -309,7 +314,7 @@ export class LlamaCloudIndex {
       throw new Error("We couldn't find the pipeline ID for the given name");
     }
 
-    await Service.deletePipelineDocumentApiV1PipelinesPipelineIdDocumentsDocumentIdDelete(
+    await PipelinesService.deletePipelineDocumentApiV1PipelinesPipelineIdDocumentsDocumentIdDelete(
       {
         pipelineId,
         documentId: document.id_,
@@ -320,10 +325,6 @@ export class LlamaCloudIndex {
   }
 
   async refreshDoc(document: Document) {
-    const appUrl = getAppBaseUrl(this.params.baseUrl);
-
-    const client = await initService({ ...this.params, baseUrl: appUrl });
-
     const pipelineId = await this.getPipelineId(
       this.params.name,
       this.params.projectName,
@@ -333,7 +334,7 @@ export class LlamaCloudIndex {
       throw new Error("We couldn't find the pipeline ID for the given name");
     }
 
-    await Service.upsertBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPut(
+    await PipelinesService.upsertBatchPipelineDocumentsApiV1PipelinesPipelineIdDocumentsPut(
       {
         pipelineId,
         requestBody: [
diff --git a/packages/llamaindex/src/cloud/LlamaCloudRetriever.ts b/packages/llamaindex/src/cloud/LlamaCloudRetriever.ts
index 698f94a52..1ae722f2f 100644
--- a/packages/llamaindex/src/cloud/LlamaCloudRetriever.ts
+++ b/packages/llamaindex/src/cloud/LlamaCloudRetriever.ts
@@ -1,7 +1,7 @@
 import {
   type MetadataFilters,
+  PipelinesService,
   type RetrievalParams,
-  Service,
   type TextNodeWithScore,
 } from "@llamaindex/cloud/api";
 import type { NodeWithScore } from "@llamaindex/core/schema";
@@ -51,7 +51,7 @@ export class LlamaCloudRetriever implements BaseRetriever {
     query,
     preFilters,
   }: RetrieveParams): Promise<NodeWithScore[]> {
-    const pipelines = await Service.searchPipelinesApiV1PipelinesGet({
+    const pipelines = await PipelinesService.searchPipelinesApiV1PipelinesGet({
       projectName: this.projectName,
       pipelineName: this.pipelineName,
     });
@@ -62,9 +62,10 @@ export class LlamaCloudRetriever implements BaseRetriever {
       );
     }
 
-    const pipeline = await Service.getPipelineApiV1PipelinesPipelineIdGet({
-      pipelineId: pipelines[0].id,
-    });
+    const pipeline =
+      await PipelinesService.getPipelineApiV1PipelinesPipelineIdGet({
+        pipelineId: pipelines[0].id,
+      });
 
     if (!pipeline) {
       throw new Error(
@@ -72,16 +73,15 @@ export class LlamaCloudRetriever implements BaseRetriever {
       );
     }
 
-    const results = await Service.runSearchApiV1PipelinesPipelineIdRetrievePost(
-      {
+    const results =
+      await PipelinesService.runSearchApiV1PipelinesPipelineIdRetrievePost({
         pipelineId: pipeline.id,
         requestBody: {
           ...this.retrieveParams,
           query: extractText(query),
           search_filters: preFilters as MetadataFilters,
         },
-      },
-    );
+      });
 
     return this.resultNodesToNodeWithScore(results.retrieval_nodes);
   }
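
The retriever's updated flow uses the class-based services end to end: search pipelines by project and pipeline name, fetch the matching pipeline, then run the retrieval. A condensed sketch of that sequence, with method and field names as they appear in the hunks above; the project/pipeline names and query are hypothetical, and only the `query` field of the request body is shown:

```ts
import { PipelinesService } from "@llamaindex/cloud/api";

const pipelines = await PipelinesService.searchPipelinesApiV1PipelinesGet({
  projectName: "default", // hypothetical values
  pipelineName: "my-pipeline",
});

const pipeline = await PipelinesService.getPipelineApiV1PipelinesPipelineIdGet({
  pipelineId: pipelines[0].id,
});

const results =
  await PipelinesService.runSearchApiV1PipelinesPipelineIdRetrievePost({
    pipelineId: pipeline.id,
    requestBody: { query: "What is LlamaCloud?" },
  });

console.log(results.retrieval_nodes);
```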
diff --git a/packages/llamaindex/src/cloud/utils.ts b/packages/llamaindex/src/cloud/utils.ts
index 63e352264..e030fcd43 100644
--- a/packages/llamaindex/src/cloud/utils.ts
+++ b/packages/llamaindex/src/cloud/utils.ts
@@ -1,4 +1,4 @@
-import { OpenAPI, Service } from "@llamaindex/cloud/api";
+import { OpenAPI } from "@llamaindex/cloud/api";
 import { getEnv } from "@llamaindex/env";
 import type { ClientParams } from "./constants.js";
 import { DEFAULT_BASE_URL } from "./constants.js";
@@ -7,14 +7,11 @@ function getBaseUrl(baseUrl?: string): string {
   return baseUrl ?? getEnv("LLAMA_CLOUD_BASE_URL") ?? DEFAULT_BASE_URL;
 }
 
-export function getAppBaseUrl(baseUrl?: string): string {
-  return getBaseUrl(baseUrl).replace(/api\./, "");
+export function getAppBaseUrl(): string {
+  return OpenAPI.BASE.replace(/api\./, "");
 }
 
-export function initService({
-  apiKey,
-  baseUrl,
-}: ClientParams = {}): typeof Service {
+export function initService({ apiKey, baseUrl }: ClientParams = {}) {
   OpenAPI.TOKEN = apiKey ?? getEnv("LLAMA_CLOUD_API_KEY");
   OpenAPI.BASE = getBaseUrl(baseUrl);
   if (!OpenAPI.TOKEN) {
@@ -22,6 +19,4 @@ export function initService({
       "API Key is required for LlamaCloudIndex. Please pass the apiKey parameter",
     );
   }
-
-  return Service;
 }
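
After `initService` sets `OpenAPI.BASE`, `getAppBaseUrl` derives the app URL by stripping the `api.` prefix from it rather than recomputing from a passed `baseUrl`, so the app URL tracks whatever base the client was initialized with. A rough illustration of the expected behavior, setting `OpenAPI` directly instead of going through `initService`:

```ts
import { OpenAPI } from "@llamaindex/cloud/api";

// initService() would normally set these from ClientParams or the
// LLAMA_CLOUD_API_KEY / LLAMA_CLOUD_BASE_URL environment variables.
OpenAPI.TOKEN = "YOUR_API_KEY";
OpenAPI.BASE = "https://api.cloud.llamaindex.ai";

// getAppBaseUrl() now reads OpenAPI.BASE instead of taking a baseUrl argument:
const appUrl = OpenAPI.BASE.replace(/api\./, "");
console.log(appUrl); // "https://cloud.llamaindex.ai"
```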
-- 
GitLab