diff --git a/packages/core/e2e/fixtures/llm/anthropic.ts b/packages/core/e2e/fixtures/llm/anthropic.ts new file mode 100644 index 0000000000000000000000000000000000000000..8d5d01e5866bc287efaad37ee671f1b52fbe4cbc --- /dev/null +++ b/packages/core/e2e/fixtures/llm/anthropic.ts @@ -0,0 +1,3 @@ +import { OpenAI } from "./open_ai.js"; + +export class Anthropic extends OpenAI {} diff --git a/packages/core/e2e/node/claude.e2e.ts b/packages/core/e2e/node/claude.e2e.ts new file mode 100644 index 0000000000000000000000000000000000000000..23325866b83cc36f946f837c8756b8ceed36bd94 --- /dev/null +++ b/packages/core/e2e/node/claude.e2e.ts @@ -0,0 +1,45 @@ +import { consola } from "consola"; +import { Anthropic, Settings, type LLM } from "llamaindex"; +import { ok } from "node:assert"; +import { beforeEach, test } from "node:test"; +import { mockLLMEvent } from "./utils.js"; + +let llm: LLM; +beforeEach(async () => { + Settings.llm = new Anthropic({ + model: "claude-3-opus", + }); + llm = Settings.llm; +}); + +await test("anthropic llm", async (t) => { + await mockLLMEvent(t, "llm-anthropic"); + await t.test("llm.chat", async () => { + const response = await llm.chat({ + messages: [ + { + content: "Hello", + role: "user", + }, + ], + }); + consola.debug("response:", response); + ok(typeof response.message.content === "string"); + }); + + await t.test("stream llm.chat", async () => { + const iter = await llm.chat({ + stream: true, + messages: [ + { + content: "hello", + role: "user", + }, + ], + }); + for await (const chunk of iter) { + consola.debug("chunk:", chunk); + ok(typeof chunk.delta === "string"); + } + }); +}); diff --git a/packages/core/e2e/node/openai.e2e.ts b/packages/core/e2e/node/openai.e2e.ts index 2b4aa051539e7fae8e63f2e30518e60e99da4a9a..710ea5bc948ab78afa775399d0e705020bde503a 100644 --- a/packages/core/e2e/node/openai.e2e.ts +++ b/packages/core/e2e/node/openai.e2e.ts @@ -30,7 +30,7 @@ function divideNumbers({ a, b }: { a: number; b: number }) { return `${a / b}`; } -await test("llm", async (t) => { +await test("openai llm", async (t) => { await mockLLMEvent(t, "llm"); await t.test("llm.chat", async () => { const response = await llm.chat({ diff --git a/packages/core/e2e/node/snapshot/llm-anthropic.snap b/packages/core/e2e/node/snapshot/llm-anthropic.snap new file mode 100644 index 0000000000000000000000000000000000000000..6b18dccbdbcf7e232336009ab847b67893fa0c12 --- /dev/null +++ b/packages/core/e2e/node/snapshot/llm-anthropic.snap @@ -0,0 +1,290 @@ +{ + "llmEventStart": [ + { + "id": "PRESERVE_0", + "messages": [ + { + "content": "Hello", + "role": "user" + } + ] + }, + { + "id": "PRESERVE_1", + "messages": [ + { + "content": "hello", + "role": "user" + } + ] + } + ], + "llmEventEnd": [ + { + "id": "PRESERVE_0", + "response": { + "raw": { + "id": "HIDDEN", + "type": "message", + "role": "assistant", + "model": "claude-3-opus-20240229", + "stop_sequence": null, + "usage": { + "input_tokens": 8, + "output_tokens": 12 + }, + "content": [ + { + "type": "text", + "text": "Hello! How can I assist you today?" + } + ], + "stop_reason": "end_turn" + }, + "message": { + "content": "Hello! How can I assist you today?", + "role": "assistant" + } + } + }, + { + "id": "PRESERVE_1", + "response": { + "raw": [ + { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": "Hello" + } + }, + "delta": "Hello" + }, + { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": "!" + } + }, + "delta": "!" 
+ }, + { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " How" + } + }, + "delta": " How" + }, + { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " can" + } + }, + "delta": " can" + }, + { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " I" + } + }, + "delta": " I" + }, + { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " assist" + } + }, + "delta": " assist" + }, + { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " you" + } + }, + "delta": " you" + }, + { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " today" + } + }, + "delta": " today" + }, + { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": "?" + } + }, + "delta": "?" + } + ], + "message": { + "content": "Hello! How can I assist you today?", + "role": "assistant", + "options": {} + } + } + } + ], + "llmEventStream": [ + { + "id": "PRESERVE_1", + "chunk": { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": "Hello" + } + }, + "delta": "Hello" + } + }, + { + "id": "PRESERVE_1", + "chunk": { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": "!" + } + }, + "delta": "!" + } + }, + { + "id": "PRESERVE_1", + "chunk": { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " How" + } + }, + "delta": " How" + } + }, + { + "id": "PRESERVE_1", + "chunk": { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " can" + } + }, + "delta": " can" + } + }, + { + "id": "PRESERVE_1", + "chunk": { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " I" + } + }, + "delta": " I" + } + }, + { + "id": "PRESERVE_1", + "chunk": { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " assist" + } + }, + "delta": " assist" + } + }, + { + "id": "PRESERVE_1", + "chunk": { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " you" + } + }, + "delta": " you" + } + }, + { + "id": "PRESERVE_1", + "chunk": { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": " today" + } + }, + "delta": " today" + } + }, + { + "id": "PRESERVE_1", + "chunk": { + "raw": { + "type": "content_block_delta", + "index": 0, + "delta": { + "type": "text_delta", + "text": "?" + } + }, + "delta": "?" 
+ } + } + ] +} \ No newline at end of file diff --git a/packages/core/e2e/node/utils.ts b/packages/core/e2e/node/utils.ts index c0ca76fbf004178c78c7099b8576c9ad2491f9d2..7b7c6916bbe19d13b1ca229faf26fa1dd7456aef 100644 --- a/packages/core/e2e/node/utils.ts +++ b/packages/core/e2e/node/utils.ts @@ -1,3 +1,4 @@ +/* eslint-disable turbo/no-undeclared-env-vars */ import { Settings, type LLMEndEvent, diff --git a/packages/core/package.json b/packages/core/package.json index 90fc057f0e23e22fd84f2ba4c4b8c2783fbd5684..423edcd9669676ec7b51c867b4c6a00ce4e39cb8 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -5,7 +5,7 @@ "license": "MIT", "type": "module", "dependencies": { - "@anthropic-ai/sdk": "^0.18.0", + "@anthropic-ai/sdk": "^0.20.4", "@aws-crypto/sha256-js": "^5.2.0", "@datastax/astra-db-ts": "^0.1.4", "@grpc/grpc-js": "^1.10.6", diff --git a/packages/core/src/llm/anthropic.ts b/packages/core/src/llm/anthropic.ts index 640d1c7cc546813668e3dfac29312593179e2285..c6dfd54732408bc0287d6e7c15ce1732ce687c0d 100644 --- a/packages/core/src/llm/anthropic.ts +++ b/packages/core/src/llm/anthropic.ts @@ -1,7 +1,13 @@ import type { ClientOptions } from "@anthropic-ai/sdk"; import { Anthropic as SDKAnthropic } from "@anthropic-ai/sdk"; +import type { + Tool, + ToolUseBlock, +} from "@anthropic-ai/sdk/resources/beta/tools/messages"; +import type { TextBlock } from "@anthropic-ai/sdk/resources/index"; import { getEnv } from "@llamaindex/env"; import _ from "lodash"; +import type { BaseTool } from "../types.js"; import { BaseLLM } from "./base.js"; import type { ChatMessage, @@ -81,7 +87,21 @@ const AVAILABLE_ANTHROPIC_MODELS_WITHOUT_DATE: { [key: string]: string } = { "claude-3-haiku": "claude-3-haiku-20240307", } as { [key in keyof typeof ALL_AVAILABLE_ANTHROPIC_MODELS]: string }; -export class Anthropic extends BaseLLM { +export type AnthropicAdditionalChatOptions = {}; + +export type AnthropicAdditionalMessageOptions = + | { + toolCall: string; + } + | { + toolUse: ToolUseBlock; + } + | {}; + +export class Anthropic extends BaseLLM< + AnthropicAdditionalChatOptions, + AnthropicAdditionalMessageOptions +> { // Per completion Anthropic params model: keyof typeof ALL_AVAILABLE_ANTHROPIC_MODELS; temperature: number; @@ -154,7 +174,7 @@ export class Anthropic extends BaseLLM { ): Promise<ChatResponse | AsyncIterable<ChatResponseChunk>> { let { messages } = params; - const { stream } = params; + const { stream, tools } = params; let systemPrompt: string | null = null; @@ -169,25 +189,63 @@ export class Anthropic extends BaseLLM { messages = messages.filter((message) => message.role !== "system"); } - //Streaming + // case: Streaming if (stream) { + if (tools) { + console.error("Tools are not supported in streaming mode"); + } return this.streamChat(messages, systemPrompt); } + // case: Non-streaming + const anthropic = this.session.anthropic; + + if (tools) { + const response = await anthropic.beta.tools.messages.create({ + messages: this.formatMessages(messages), + tools: tools.map(Anthropic.toTool), + model: this.getModelName(this.model), + temperature: this.temperature, + max_tokens: this.maxTokens ?? 4096, + top_p: this.topP, + ...(systemPrompt && { system: systemPrompt }), + }); - //Non-streaming - const response = await this.session.anthropic.messages.create({ - model: this.getModelName(this.model), - messages: this.formatMessages(messages), - max_tokens: this.maxTokens ?? 
4096, - temperature: this.temperature, - top_p: this.topP, - ...(systemPrompt && { system: systemPrompt }), - }); + const toolUseBlock = response.content.find( + (content): content is ToolUseBlock => content.type === "tool_use", + ); - return { - raw: response, - message: { content: response.content[0].text, role: "assistant" }, - }; + return { + raw: response, + message: { + content: response.content + .filter((content): content is TextBlock => content.type === "text") + .map((content) => ({ + type: "text", + text: content.text, + })), + role: "assistant", + options: toolUseBlock + ? { + toolUse: toolUseBlock, + } + : {}, + }, + }; + } else { + const response = await anthropic.messages.create({ + model: this.getModelName(this.model), + messages: this.formatMessages(messages), + max_tokens: this.maxTokens ?? 4096, + temperature: this.temperature, + top_p: this.topP, + ...(systemPrompt && { system: systemPrompt }), + }); + + return { + raw: response, + message: { content: response.content[0].text, role: "assistant" }, + }; + } } protected async *streamChat( @@ -219,4 +277,19 @@ export class Anthropic extends BaseLLM { } return; } + + static toTool(tool: BaseTool): Tool { + if (tool.metadata.parameters?.type !== "object") { + throw new TypeError("Tool parameters must be an object"); + } + return { + input_schema: { + type: "object", + properties: tool.metadata.parameters.properties, + required: tool.metadata.parameters.required, + }, + name: tool.metadata.name, + description: tool.metadata.description, + }; + } } diff --git a/packages/edge/package.json b/packages/edge/package.json index c9ed8081c4cdbe624ebb0cb7dfd667c2996db86c..6aa6deeb660d7933cd009c057de6fe6e6e4fa5c7 100644 --- a/packages/edge/package.json +++ b/packages/edge/package.json @@ -4,7 +4,7 @@ "license": "MIT", "type": "module", "dependencies": { - "@anthropic-ai/sdk": "^0.18.0", + "@anthropic-ai/sdk": "^0.20.4", "@aws-crypto/sha256-js": "^5.2.0", "@datastax/astra-db-ts": "^0.1.4", "@grpc/grpc-js": "^1.10.6", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index dbfe638b0aa3c4100e6e5f3febadd5f1274b2192..0c7419170af04f1bc719506f4fba207b084b0656 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -188,8 +188,8 @@ importers: packages/core: dependencies: '@anthropic-ai/sdk': - specifier: ^0.18.0 - version: 0.18.0 + specifier: ^0.20.4 + version: 0.20.4 '@aws-crypto/sha256-js': specifier: ^5.2.0 version: 5.2.0 @@ -351,8 +351,8 @@ importers: packages/edge: dependencies: '@anthropic-ai/sdk': - specifier: ^0.18.0 - version: 0.18.0 + specifier: ^0.20.4 + version: 0.20.4 '@aws-crypto/sha256-js': specifier: ^5.2.0 version: 5.2.0 @@ -788,14 +788,13 @@ packages: '@jridgewell/gen-mapping': 0.3.5 '@jridgewell/trace-mapping': 0.3.25 - /@anthropic-ai/sdk@0.18.0: - resolution: {integrity: sha512-3XsWEn/4nPGRd4AdSguugbSDFy6Z2AWTNOeI3iK+aV22+w23+vY9CEb3Hiy0kvKIQuxSmZz/+5WKC8nPWy8gVg==} + /@anthropic-ai/sdk@0.20.4: + resolution: {integrity: sha512-ULzz+0Smk9SNkAi1tcjJByxbt4taBhnQkRAB75iH0ku5dRwiPxGxN0WOWHoNIq22dGWBXJByrQRhVf80V4xAPA==} dependencies: '@types/node': 18.19.31 '@types/node-fetch': 2.6.11 abort-controller: 3.0.0 agentkeepalive: 4.5.0 - digest-fetch: 1.3.0 form-data-encoder: 1.7.2 formdata-node: 4.4.1 node-fetch: 2.7.0(encoding@0.1.13) @@ -5855,10 +5854,6 @@ packages: dev: false optional: true - /base-64@0.1.0: - resolution: {integrity: sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA==} - dev: false - /base64-js@1.5.1: resolution: {integrity: 
sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} @@ -6200,10 +6195,6 @@ packages: resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==} dev: true - /charenc@0.0.2: - resolution: {integrity: sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==} - dev: false - /check-error@1.0.3: resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==} dependencies: @@ -6708,10 +6699,6 @@ packages: shebang-command: 2.0.0 which: 2.0.2 - /crypt@0.0.2: - resolution: {integrity: sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==} - dev: false - /crypto-random-string@4.0.0: resolution: {integrity: sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==} engines: {node: '>=12'} @@ -7295,13 +7282,6 @@ packages: engines: {node: '>=0.3.1'} dev: true - /digest-fetch@1.3.0: - resolution: {integrity: sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA==} - dependencies: - base-64: 0.1.0 - md5: 2.3.0 - dev: false - /dingbat-to-unicode@1.0.1: resolution: {integrity: sha512-98l0sW87ZT58pU4i61wa2OHwxbiYSbuxsCBozaVnYX2iCnr3bLM3fIes1/ej7h1YdOKuKt/MLs706TVnALA65w==} dev: false @@ -9475,10 +9455,6 @@ packages: has-tostringtag: 1.0.0 dev: true - /is-buffer@1.1.6: - resolution: {integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==} - dev: false - /is-callable@1.2.7: resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} engines: {node: '>= 0.4'} @@ -10370,14 +10346,6 @@ packages: resolution: {integrity: sha512-sMG6JtX0ebcRMHxYTcmgsh0/m6o8hGdQHFE2OgjvflRZlQM51CGGj/uuk056D+12BlCiW0aTpt/AdlDNtgQiew==} dev: false - /md5@2.3.0: - resolution: {integrity: sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==} - dependencies: - charenc: 0.0.2 - crypt: 0.0.2 - is-buffer: 1.1.6 - dev: false - /mdast-util-directive@3.0.0: resolution: {integrity: sha512-JUpYOqKI4mM3sZcNxmF/ox04XYFFkNwr0CFlrQIkCwbvH0xzMCqkMqAde9wRd80VAhaUrwFwKm2nxretdT1h7Q==} dependencies:
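Beyond the `@anthropic-ai/sdk` bump from ^0.18.0 to ^0.20.4 (which also drops the transitive `digest-fetch`/`md5` chain from the lockfile), the substantive change is in `Anthropic.chat`: the non-streaming branch now calls `anthropic.beta.tools.messages.create` whenever `tools` are supplied, returns the text blocks as `{ type: "text", text }` parts, and surfaces any `tool_use` block through `message.options.toolUse`, while the streaming branch still ignores tools and only logs an error. Below is a minimal sketch of driving that path; it assumes `BaseTool` is exported from the package entry point and uses a hypothetical `divideNumbers` tool (the metadata must describe a JSON-schema object, otherwise `Anthropic.toTool` throws a `TypeError`).

```ts
import { Anthropic, type BaseTool } from "llamaindex";

// Hypothetical tool used only for illustration; Anthropic.toTool reads the
// metadata, and `parameters.type` must be "object".
const divideNumbers: BaseTool = {
  call: ({ a, b }: { a: number; b: number }) => `${a / b}`,
  metadata: {
    name: "divideNumbers",
    description: "Divide two numbers",
    parameters: {
      type: "object",
      properties: {
        a: { type: "number", description: "The dividend" },
        b: { type: "number", description: "The divisor" },
      },
      required: ["a", "b"],
    },
  },
};

const llm = new Anthropic({ model: "claude-3-opus" });
const response = await llm.chat({
  messages: [{ role: "user", content: "What is 84 divided by 2?" }],
  tools: [divideNumbers],
});

// With tools, text content comes back as an array of text parts, and a
// tool_use block (if the model emitted one) is exposed on message.options.
const options = response.message.options ?? {};
if ("toolUse" in options) {
  console.log("tool requested:", options.toolUse.name, options.toolUse.input);
}
```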
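For reference, `Anthropic.toTool` is a thin projection of that metadata onto the Anthropic SDK's `Tool` shape, so the tool above would be sent to the API roughly as:

```ts
// Continuing the sketch above: what Anthropic.toTool(divideNumbers) yields.
const sdkTool = Anthropic.toTool(divideNumbers);
// {
//   name: "divideNumbers",
//   description: "Divide two numbers",
//   input_schema: {
//     type: "object",
//     properties: { a: { type: "number", ... }, b: { type: "number", ... } },
//     required: ["a", "b"],
//   },
// }
```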
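On the test side, `claude.e2e.ts` mirrors `openai.e2e.ts`: `mockLLMEvent(t, "llm-anthropic")` replays the recorded `llm-anthropic.snap`, which stores one `llmEventStart`/`llmEventEnd` pair per `llm.chat` call (ids normalized to `PRESERVE_n`, volatile fields to `"HIDDEN"`) and one `llmEventStream` entry per streamed chunk. A rough sketch of observing the same events outside the harness, assuming the callback names `"llm-start"`, `"llm-end"`, and `"llm-stream"` and that each event's `detail` carries the fields seen in the snapshot (both inferred, not confirmed by this diff):

```ts
import { Settings } from "llamaindex";

// Assumed event names: the snapshot keys (llmEventStart / llmEventEnd /
// llmEventStream) suggest this mapping, but verify against the
// CallbackManager types shipped with the installed llamaindex version.
Settings.callbackManager.on("llm-start", (event) => {
  console.debug("llm-start", event.detail); // { id, messages } per the snapshot
});
Settings.callbackManager.on("llm-end", (event) => {
  console.debug("llm-end", event.detail); // { id, response }
});
Settings.callbackManager.on("llm-stream", (event) => {
  console.debug("llm-stream", event.detail); // { id, chunk }
});
```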