{ "name": "@llamaindex/env", "description": "environment wrapper, supports all JS environment including node, deno, bun, edge runtime, and cloudflare worker", "version": "0.1.29", "type": "module", "types": "dist/index.d.ts", "module": "dist/index.js", "main": "dist/index.cjs", "keywords": [ "llm", "llama", "openai", "gpt", "data science", "prompt", "prompt engineering", "chatgpt", "machine learning", "ml", "embedding", "vectorstore", "data framework", "llamaindex" ], "exports": { ".": { "node": { "types": "./dist/index.d.ts", "import": "./dist/index.js", "require": "./dist/index.cjs", "default": "./dist/index.js" }, "workerd": { "types": "./dist/index.workerd.d.ts", "default": "./dist/index.workerd.js" }, "edge-light": { "types": "./dist/index.edge-light.d.ts", "default": "./dist/index.edge-light.js" }, "browser": { "types": "./dist/index.browser.d.ts", "default": "./dist/index.browser.js" }, "import": { "types": "./dist/index.d.ts", "default": "./dist/index.js" }, "require": { "types": "./dist/index.d.cts", "default": "./dist/index.cjs" } }, "./tokenizers": { "workerd": { "types": "./tokenizers/dist/index.workerd.d.ts", "default": "./tokenizers/dist/index.workerd.js" }, "edge-light": { "types": "./tokenizers/dist/index.edge-light.d.ts", "default": "./tokenizers/dist/index.edge-light.js" }, "browser": { "types": "./tokenizers/dist/index.browser.d.ts", "default": "./tokenizers/dist/index.browser.js" }, "import": { "types": "./tokenizers/dist/index.d.ts", "default": "./tokenizers/dist/index.js" }, "require": { "types": "./tokenizers/dist/index.d.cts", "default": "./tokenizers/dist/index.cjs" }, "default": "./tokenizers/dist/index.js" }, "./multi-model": { "workerd": { "types": "./multi-model/dist/index.workerd.d.ts", "default": "./multi-model/dist/index.workerd.js" }, "edge-light": { "types": "./multi-model/dist/index.edge-light.d.ts", "default": "./multi-model/dist/index.edge-light.js" }, "browser": { "types": "./multi-model/dist/index.browser.d.ts", "default": "./multi-model/dist/index.browser.js" }, "import": { "types": "./multi-model/dist/index.d.ts", "default": "./multi-model/dist/index.js" }, "require": { "types": "./multi-model/dist/index.d.cts", "default": "./multi-model/dist/index.cjs" }, "default": "./multi-model/dist/index.js" } }, "files": [ "tokenizers", "multi-model", "dist", "CHANGELOG.md", "!**/*.tsbuildinfo" ], "repository": { "type": "git", "url": "git+https://github.com/run-llama/LlamaIndexTS.git", "directory": "packages/env" }, "scripts": { "dev": "bunchee --watch", "build": "bunchee", "test": "vitest" }, "devDependencies": { "@huggingface/transformers": "^3.0.2", "@types/node": "^22.9.0", "@types/readable-stream": "^4.0.15", "bunchee": "6.4.0", "vitest": "^2.1.5" }, "dependencies": { "pathe": "^1.1.2", "@aws-crypto/sha256-js": "^5.2.0", "js-tiktoken": "^1.0.12" }, "peerDependencies": { "@huggingface/transformers": "^3.0.2", "gpt-tokenizer": "^2.5.0" }, "peerDependenciesMeta": { "@huggingface/transformers": { "optional": true }, "gpt-tokenizer": { "optional": true } } }