From 37e8dabdceb5c3962aee1b4cd16a2de616356657 Mon Sep 17 00:00:00 2001 From: dloman118 <99347459+dloman118@users.noreply.github.com> Date: Tue, 11 Jun 2024 12:11:51 -0400 Subject: [PATCH] move all Groq examples to a new Groq folder --- .../Function-Calling-101-Ecommerce.ipynb | 0 .../customers.csv | 0 .../function-calling-101-ecommerce/orders.csv | 0 .../products.csv | 0 .../data/employees.csv | 0 .../data/purchases.csv | 0 .../json-mode-function-calling-for-sql.ipynb | 0 .../employees-without-purchases.yaml | 0 .../most-expensive-purchase.yaml | 0 .../most-recent-purchases.yaml | 0 .../verified-queries/number-of-teslas.yaml | 0 .../SDOH-Json-mode.ipynb | 0 .../clinical_notes/00456321.txt | 0 .../clinical_notes/00567289.txt | 0 .../clinical_notes/00678934.txt | 0 .../clinical_notes/00785642.txt | 0 .../clinical_notes/00893247.txt | 0 ...llama3-stock-market-function-calling.ipynb | 0 .../parallel-tool-use/parallel-tool-use.ipynb | 0 .../parallel-tool-use/requirements.txt | 0 .../presidential_speeches.csv | 0 .../rag-langchain-presidential-speeches.ipynb | 0 .../README.md | 0 .../conversational-chatbot-langchain/main.py | 0 .../requirements.txt | 0 .../crewai-agents/README.md | 0 .../crewai-agents/main.py | 0 .../crewai-agents/requirements.txt | 0 .../README.md | 0 .../main.py | 0 .../requirements.txt | 0 .../README.md | 0 .../main.py | 0 .../requirements.txt | 0 .../README.md | 0 .../main.py | 0 .../requirements.txt | 0 .../README.md | 0 .../main.py | 0 .../requirements.txt | 0 .../text-to-sql-json-mode/README.md | 0 .../text-to-sql-json-mode/data/employees.csv | 0 .../text-to-sql-json-mode/data/purchases.csv | 0 .../text-to-sql-json-mode/main.py | 0 .../prompts/base_prompt.txt | 0 .../text-to-sql-json-mode/requirements.txt | 0 .../verified-sql-function-calling/README.md | 0 .../data/employees.csv | 0 .../data/purchases.csv | 0 .../verified-sql-function-calling/main.py | 0 .../requirements.txt | 0 .../employees-without-purchases.yaml | 0 
.../most-expensive-purchase.yaml | 0 .../most-recent-purchases.yaml | 0 .../verified-queries/number-of-teslas.yaml | 0 .../Groq/llama3_cookbook_groq.ipynb | 1708 +++++++++++++++++ .../llama3_cookbook_groq.ipynb | 937 --------- 57 files changed, 1708 insertions(+), 937 deletions(-) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/function-calling-101-ecommerce/Function-Calling-101-Ecommerce.ipynb (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/function-calling-101-ecommerce/customers.csv (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/function-calling-101-ecommerce/orders.csv (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/function-calling-101-ecommerce/products.csv (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-function-calling-for-sql/data/employees.csv (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-function-calling-for-sql/data/purchases.csv (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-function-calling-for-sql/json-mode-function-calling-for-sql.ipynb (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/employees-without-purchases.yaml (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-expensive-purchase.yaml (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-recent-purchases.yaml (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/number-of-teslas.yaml (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-social-determinants-of-health/SDOH-Json-mode.ipynb (100%) rename recipes/llama_api_providers/{ => 
Groq}/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00456321.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00567289.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00678934.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00785642.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00893247.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/llama3-stock-market-function-calling/llama3-stock-market-function-calling.ipynb (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/parallel-tool-use/parallel-tool-use.ipynb (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/parallel-tool-use/requirements.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/rag-langchain-presidential-speeches/presidential_speeches.csv (100%) rename recipes/llama_api_providers/{ => Groq}/groq-api-cookbook/rag-langchain-presidential-speeches/rag-langchain-presidential-speeches.ipynb (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/conversational-chatbot-langchain/README.md (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/conversational-chatbot-langchain/main.py (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/conversational-chatbot-langchain/requirements.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/crewai-agents/README.md (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/crewai-agents/main.py (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/crewai-agents/requirements.txt (100%) rename 
recipes/llama_api_providers/{ => Groq}/groq-example-templates/groq-quickstart-conversational-chatbot/README.md (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/groq-quickstart-conversational-chatbot/main.py (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/groq-quickstart-conversational-chatbot/requirements.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/groqing-the-stock-market-function-calling-llama3/README.md (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/groqing-the-stock-market-function-calling-llama3/main.py (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/groqing-the-stock-market-function-calling-llama3/requirements.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/README.md (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/main.py (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/requirements.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/presidential-speeches-rag-with-pinecone/README.md (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/presidential-speeches-rag-with-pinecone/main.py (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/presidential-speeches-rag-with-pinecone/requirements.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/text-to-sql-json-mode/README.md (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/text-to-sql-json-mode/data/employees.csv (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/text-to-sql-json-mode/data/purchases.csv (100%) rename recipes/llama_api_providers/{ => 
Groq}/groq-example-templates/text-to-sql-json-mode/main.py (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/text-to-sql-json-mode/prompts/base_prompt.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/text-to-sql-json-mode/requirements.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/verified-sql-function-calling/README.md (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/verified-sql-function-calling/data/employees.csv (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/verified-sql-function-calling/data/purchases.csv (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/verified-sql-function-calling/main.py (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/verified-sql-function-calling/requirements.txt (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/verified-sql-function-calling/verified-queries/employees-without-purchases.yaml (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/verified-sql-function-calling/verified-queries/most-expensive-purchase.yaml (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/verified-sql-function-calling/verified-queries/most-recent-purchases.yaml (100%) rename recipes/llama_api_providers/{ => Groq}/groq-example-templates/verified-sql-function-calling/verified-queries/number-of-teslas.yaml (100%) create mode 100644 recipes/llama_api_providers/Groq/llama3_cookbook_groq.ipynb delete mode 100644 recipes/llama_api_providers/llama3_cookbook_groq.ipynb diff --git a/recipes/llama_api_providers/groq-api-cookbook/function-calling-101-ecommerce/Function-Calling-101-Ecommerce.ipynb b/recipes/llama_api_providers/Groq/groq-api-cookbook/function-calling-101-ecommerce/Function-Calling-101-Ecommerce.ipynb similarity index 100% rename from 
recipes/llama_api_providers/groq-api-cookbook/function-calling-101-ecommerce/Function-Calling-101-Ecommerce.ipynb rename to recipes/llama_api_providers/Groq/groq-api-cookbook/function-calling-101-ecommerce/Function-Calling-101-Ecommerce.ipynb diff --git a/recipes/llama_api_providers/groq-api-cookbook/function-calling-101-ecommerce/customers.csv b/recipes/llama_api_providers/Groq/groq-api-cookbook/function-calling-101-ecommerce/customers.csv similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/function-calling-101-ecommerce/customers.csv rename to recipes/llama_api_providers/Groq/groq-api-cookbook/function-calling-101-ecommerce/customers.csv diff --git a/recipes/llama_api_providers/groq-api-cookbook/function-calling-101-ecommerce/orders.csv b/recipes/llama_api_providers/Groq/groq-api-cookbook/function-calling-101-ecommerce/orders.csv similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/function-calling-101-ecommerce/orders.csv rename to recipes/llama_api_providers/Groq/groq-api-cookbook/function-calling-101-ecommerce/orders.csv diff --git a/recipes/llama_api_providers/groq-api-cookbook/function-calling-101-ecommerce/products.csv b/recipes/llama_api_providers/Groq/groq-api-cookbook/function-calling-101-ecommerce/products.csv similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/function-calling-101-ecommerce/products.csv rename to recipes/llama_api_providers/Groq/groq-api-cookbook/function-calling-101-ecommerce/products.csv diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/data/employees.csv b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/data/employees.csv similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/data/employees.csv rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/data/employees.csv diff 
--git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/data/purchases.csv b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/data/purchases.csv similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/data/purchases.csv rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/data/purchases.csv diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/json-mode-function-calling-for-sql.ipynb b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/json-mode-function-calling-for-sql.ipynb similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/json-mode-function-calling-for-sql.ipynb rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/json-mode-function-calling-for-sql.ipynb diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/employees-without-purchases.yaml b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/employees-without-purchases.yaml similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/employees-without-purchases.yaml rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/employees-without-purchases.yaml diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-expensive-purchase.yaml b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-expensive-purchase.yaml similarity index 100% rename from 
recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-expensive-purchase.yaml rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-expensive-purchase.yaml diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-recent-purchases.yaml b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-recent-purchases.yaml similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-recent-purchases.yaml rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/most-recent-purchases.yaml diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/number-of-teslas.yaml b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/number-of-teslas.yaml similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/number-of-teslas.yaml rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-function-calling-for-sql/verified-queries/number-of-teslas.yaml diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/SDOH-Json-mode.ipynb b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/SDOH-Json-mode.ipynb similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/SDOH-Json-mode.ipynb rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/SDOH-Json-mode.ipynb diff --git 
a/recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00456321.txt b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00456321.txt similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00456321.txt rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00456321.txt diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00567289.txt b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00567289.txt similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00567289.txt rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00567289.txt diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00678934.txt b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00678934.txt similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00678934.txt rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00678934.txt diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00785642.txt b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00785642.txt similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00785642.txt rename to 
recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00785642.txt diff --git a/recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00893247.txt b/recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00893247.txt similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00893247.txt rename to recipes/llama_api_providers/Groq/groq-api-cookbook/json-mode-social-determinants-of-health/clinical_notes/00893247.txt diff --git a/recipes/llama_api_providers/groq-api-cookbook/llama3-stock-market-function-calling/llama3-stock-market-function-calling.ipynb b/recipes/llama_api_providers/Groq/groq-api-cookbook/llama3-stock-market-function-calling/llama3-stock-market-function-calling.ipynb similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/llama3-stock-market-function-calling/llama3-stock-market-function-calling.ipynb rename to recipes/llama_api_providers/Groq/groq-api-cookbook/llama3-stock-market-function-calling/llama3-stock-market-function-calling.ipynb diff --git a/recipes/llama_api_providers/groq-api-cookbook/parallel-tool-use/parallel-tool-use.ipynb b/recipes/llama_api_providers/Groq/groq-api-cookbook/parallel-tool-use/parallel-tool-use.ipynb similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/parallel-tool-use/parallel-tool-use.ipynb rename to recipes/llama_api_providers/Groq/groq-api-cookbook/parallel-tool-use/parallel-tool-use.ipynb diff --git a/recipes/llama_api_providers/groq-api-cookbook/parallel-tool-use/requirements.txt b/recipes/llama_api_providers/Groq/groq-api-cookbook/parallel-tool-use/requirements.txt similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/parallel-tool-use/requirements.txt rename to 
recipes/llama_api_providers/Groq/groq-api-cookbook/parallel-tool-use/requirements.txt diff --git a/recipes/llama_api_providers/groq-api-cookbook/rag-langchain-presidential-speeches/presidential_speeches.csv b/recipes/llama_api_providers/Groq/groq-api-cookbook/rag-langchain-presidential-speeches/presidential_speeches.csv similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/rag-langchain-presidential-speeches/presidential_speeches.csv rename to recipes/llama_api_providers/Groq/groq-api-cookbook/rag-langchain-presidential-speeches/presidential_speeches.csv diff --git a/recipes/llama_api_providers/groq-api-cookbook/rag-langchain-presidential-speeches/rag-langchain-presidential-speeches.ipynb b/recipes/llama_api_providers/Groq/groq-api-cookbook/rag-langchain-presidential-speeches/rag-langchain-presidential-speeches.ipynb similarity index 100% rename from recipes/llama_api_providers/groq-api-cookbook/rag-langchain-presidential-speeches/rag-langchain-presidential-speeches.ipynb rename to recipes/llama_api_providers/Groq/groq-api-cookbook/rag-langchain-presidential-speeches/rag-langchain-presidential-speeches.ipynb diff --git a/recipes/llama_api_providers/groq-example-templates/conversational-chatbot-langchain/README.md b/recipes/llama_api_providers/Groq/groq-example-templates/conversational-chatbot-langchain/README.md similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/conversational-chatbot-langchain/README.md rename to recipes/llama_api_providers/Groq/groq-example-templates/conversational-chatbot-langchain/README.md diff --git a/recipes/llama_api_providers/groq-example-templates/conversational-chatbot-langchain/main.py b/recipes/llama_api_providers/Groq/groq-example-templates/conversational-chatbot-langchain/main.py similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/conversational-chatbot-langchain/main.py rename to 
recipes/llama_api_providers/Groq/groq-example-templates/conversational-chatbot-langchain/main.py diff --git a/recipes/llama_api_providers/groq-example-templates/conversational-chatbot-langchain/requirements.txt b/recipes/llama_api_providers/Groq/groq-example-templates/conversational-chatbot-langchain/requirements.txt similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/conversational-chatbot-langchain/requirements.txt rename to recipes/llama_api_providers/Groq/groq-example-templates/conversational-chatbot-langchain/requirements.txt diff --git a/recipes/llama_api_providers/groq-example-templates/crewai-agents/README.md b/recipes/llama_api_providers/Groq/groq-example-templates/crewai-agents/README.md similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/crewai-agents/README.md rename to recipes/llama_api_providers/Groq/groq-example-templates/crewai-agents/README.md diff --git a/recipes/llama_api_providers/groq-example-templates/crewai-agents/main.py b/recipes/llama_api_providers/Groq/groq-example-templates/crewai-agents/main.py similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/crewai-agents/main.py rename to recipes/llama_api_providers/Groq/groq-example-templates/crewai-agents/main.py diff --git a/recipes/llama_api_providers/groq-example-templates/crewai-agents/requirements.txt b/recipes/llama_api_providers/Groq/groq-example-templates/crewai-agents/requirements.txt similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/crewai-agents/requirements.txt rename to recipes/llama_api_providers/Groq/groq-example-templates/crewai-agents/requirements.txt diff --git a/recipes/llama_api_providers/groq-example-templates/groq-quickstart-conversational-chatbot/README.md b/recipes/llama_api_providers/Groq/groq-example-templates/groq-quickstart-conversational-chatbot/README.md similarity index 100% rename from 
recipes/llama_api_providers/groq-example-templates/groq-quickstart-conversational-chatbot/README.md rename to recipes/llama_api_providers/Groq/groq-example-templates/groq-quickstart-conversational-chatbot/README.md diff --git a/recipes/llama_api_providers/groq-example-templates/groq-quickstart-conversational-chatbot/main.py b/recipes/llama_api_providers/Groq/groq-example-templates/groq-quickstart-conversational-chatbot/main.py similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/groq-quickstart-conversational-chatbot/main.py rename to recipes/llama_api_providers/Groq/groq-example-templates/groq-quickstart-conversational-chatbot/main.py diff --git a/recipes/llama_api_providers/groq-example-templates/groq-quickstart-conversational-chatbot/requirements.txt b/recipes/llama_api_providers/Groq/groq-example-templates/groq-quickstart-conversational-chatbot/requirements.txt similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/groq-quickstart-conversational-chatbot/requirements.txt rename to recipes/llama_api_providers/Groq/groq-example-templates/groq-quickstart-conversational-chatbot/requirements.txt diff --git a/recipes/llama_api_providers/groq-example-templates/groqing-the-stock-market-function-calling-llama3/README.md b/recipes/llama_api_providers/Groq/groq-example-templates/groqing-the-stock-market-function-calling-llama3/README.md similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/groqing-the-stock-market-function-calling-llama3/README.md rename to recipes/llama_api_providers/Groq/groq-example-templates/groqing-the-stock-market-function-calling-llama3/README.md diff --git a/recipes/llama_api_providers/groq-example-templates/groqing-the-stock-market-function-calling-llama3/main.py b/recipes/llama_api_providers/Groq/groq-example-templates/groqing-the-stock-market-function-calling-llama3/main.py similarity index 100% rename from 
recipes/llama_api_providers/groq-example-templates/groqing-the-stock-market-function-calling-llama3/main.py rename to recipes/llama_api_providers/Groq/groq-example-templates/groqing-the-stock-market-function-calling-llama3/main.py diff --git a/recipes/llama_api_providers/groq-example-templates/groqing-the-stock-market-function-calling-llama3/requirements.txt b/recipes/llama_api_providers/Groq/groq-example-templates/groqing-the-stock-market-function-calling-llama3/requirements.txt similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/groqing-the-stock-market-function-calling-llama3/requirements.txt rename to recipes/llama_api_providers/Groq/groq-example-templates/groqing-the-stock-market-function-calling-llama3/requirements.txt diff --git a/recipes/llama_api_providers/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/README.md b/recipes/llama_api_providers/Groq/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/README.md similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/README.md rename to recipes/llama_api_providers/Groq/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/README.md diff --git a/recipes/llama_api_providers/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/main.py b/recipes/llama_api_providers/Groq/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/main.py similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/main.py rename to recipes/llama_api_providers/Groq/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/main.py diff --git a/recipes/llama_api_providers/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/requirements.txt 
b/recipes/llama_api_providers/Groq/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/requirements.txt similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/requirements.txt rename to recipes/llama_api_providers/Groq/groq-example-templates/llamachat-conversational-chatbot-with-llamaIndex/requirements.txt diff --git a/recipes/llama_api_providers/groq-example-templates/presidential-speeches-rag-with-pinecone/README.md b/recipes/llama_api_providers/Groq/groq-example-templates/presidential-speeches-rag-with-pinecone/README.md similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/presidential-speeches-rag-with-pinecone/README.md rename to recipes/llama_api_providers/Groq/groq-example-templates/presidential-speeches-rag-with-pinecone/README.md diff --git a/recipes/llama_api_providers/groq-example-templates/presidential-speeches-rag-with-pinecone/main.py b/recipes/llama_api_providers/Groq/groq-example-templates/presidential-speeches-rag-with-pinecone/main.py similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/presidential-speeches-rag-with-pinecone/main.py rename to recipes/llama_api_providers/Groq/groq-example-templates/presidential-speeches-rag-with-pinecone/main.py diff --git a/recipes/llama_api_providers/groq-example-templates/presidential-speeches-rag-with-pinecone/requirements.txt b/recipes/llama_api_providers/Groq/groq-example-templates/presidential-speeches-rag-with-pinecone/requirements.txt similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/presidential-speeches-rag-with-pinecone/requirements.txt rename to recipes/llama_api_providers/Groq/groq-example-templates/presidential-speeches-rag-with-pinecone/requirements.txt diff --git a/recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/README.md 
b/recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/README.md similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/README.md rename to recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/README.md diff --git a/recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/data/employees.csv b/recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/data/employees.csv similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/data/employees.csv rename to recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/data/employees.csv diff --git a/recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/data/purchases.csv b/recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/data/purchases.csv similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/data/purchases.csv rename to recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/data/purchases.csv diff --git a/recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/main.py b/recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/main.py similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/main.py rename to recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/main.py diff --git a/recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/prompts/base_prompt.txt b/recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/prompts/base_prompt.txt similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/prompts/base_prompt.txt rename to 
recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/prompts/base_prompt.txt diff --git a/recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/requirements.txt b/recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/requirements.txt similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/text-to-sql-json-mode/requirements.txt rename to recipes/llama_api_providers/Groq/groq-example-templates/text-to-sql-json-mode/requirements.txt diff --git a/recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/README.md b/recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/README.md similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/README.md rename to recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/README.md diff --git a/recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/data/employees.csv b/recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/data/employees.csv similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/data/employees.csv rename to recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/data/employees.csv diff --git a/recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/data/purchases.csv b/recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/data/purchases.csv similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/data/purchases.csv rename to recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/data/purchases.csv diff --git 
a/recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/main.py b/recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/main.py similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/main.py rename to recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/main.py diff --git a/recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/requirements.txt b/recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/requirements.txt similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/requirements.txt rename to recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/requirements.txt diff --git a/recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/verified-queries/employees-without-purchases.yaml b/recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/verified-queries/employees-without-purchases.yaml similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/verified-queries/employees-without-purchases.yaml rename to recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/verified-queries/employees-without-purchases.yaml diff --git a/recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/verified-queries/most-expensive-purchase.yaml b/recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/verified-queries/most-expensive-purchase.yaml similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/verified-queries/most-expensive-purchase.yaml rename to 
recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/verified-queries/most-expensive-purchase.yaml diff --git a/recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/verified-queries/most-recent-purchases.yaml b/recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/verified-queries/most-recent-purchases.yaml similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/verified-queries/most-recent-purchases.yaml rename to recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/verified-queries/most-recent-purchases.yaml diff --git a/recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/verified-queries/number-of-teslas.yaml b/recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/verified-queries/number-of-teslas.yaml similarity index 100% rename from recipes/llama_api_providers/groq-example-templates/verified-sql-function-calling/verified-queries/number-of-teslas.yaml rename to recipes/llama_api_providers/Groq/groq-example-templates/verified-sql-function-calling/verified-queries/number-of-teslas.yaml diff --git a/recipes/llama_api_providers/Groq/llama3_cookbook_groq.ipynb b/recipes/llama_api_providers/Groq/llama3_cookbook_groq.ipynb new file mode 100644 index 00000000..6b5ba785 --- /dev/null +++ b/recipes/llama_api_providers/Groq/llama3_cookbook_groq.ipynb @@ -0,0 +1,1708 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "09211e76-286f-4b12-acd7-cfb082dc2d66", + "metadata": {}, + "source": [ + "# Llama 3 Cookbook with LlamaIndex and Groq\n", + "\n", + "<a href=\"https://colab.research.google.com/github/meta-llama/llama-recipes/blob/main/recipes/llama_api_providers/llama3_cookbook_groq.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>\n", + "\n", + "Meta 
developed and released the Meta [Llama 3](https://ai.meta.com/blog/meta-llama-3/) family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8 and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.\n", + "\n", + "In this notebook, we demonstrate how to use Llama 3 with LlamaIndex for a comprehensive set of use cases. \n", + "1. Basic completion / chat \n", + "2. Basic RAG (Vector Search, Summarization)\n", + "3. Advanced RAG (Routing)\n", + "4. Text-to-SQL \n", + "5. Structured Data Extraction\n", + "6. Chat Engine + Memory\n", + "7. Agents\n", + "\n", + "\n", + "We use Llama3-8B and Llama3-70B through [Groq](https://groq.com) - you can sign up there to get a free trial API key." + ] + }, + { + "cell_type": "markdown", + "id": "de2901c0-e20d-48e5-9385-dbca2258c564", + "metadata": {}, + "source": [ + "## Installation and Setup" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "bcf643ac-b025-4812-aaed-f8f85d1ba505", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: llama-index in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (0.10.16)\n", + "Requirement already satisfied: llama-index-embeddings-openai<0.2.0,>=0.1.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.6)\n", + "Requirement already satisfied: llama-index-cli<0.2.0,>=0.1.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.7)\n", + "Requirement already satisfied: llama-index-multi-modal-llms-openai<0.2.0,>=0.1.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.4)\n", + "Requirement already satisfied: llama-index-agent-openai<0.2.0,>=0.1.4 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.5)\n", + "Requirement already satisfied: llama-index-question-gen-openai<0.2.0,>=0.1.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.3)\n", + "Requirement already satisfied: llama-index-readers-file<0.2.0,>=0.1.4 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.8)\n", + "Requirement already satisfied: llama-index-legacy<0.10.0,>=0.9.48 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.9.48)\n", + "Requirement already satisfied: llama-index-readers-llama-parse<0.2.0,>=0.1.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.3)\n", + "Requirement already satisfied: llama-index-llms-openai<0.2.0,>=0.1.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.7)\n", + "Requirement already satisfied: llama-index-indices-managed-llama-cloud<0.2.0,>=0.1.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.3)\n", + "Requirement already satisfied: llama-index-core<0.11.0,>=0.10.16 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.10.16.post1)\n", + "Requirement already satisfied: llama-index-program-openai<0.2.0,>=0.1.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index) (0.1.4)\n", + "Requirement already satisfied: llama-index-vector-stores-chroma<0.2.0,>=0.1.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.1.5)\n", + "Requirement already satisfied: SQLAlchemy[asyncio]>=1.4.49 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (2.0.25)\n", + "Requirement already satisfied: deprecated>=1.2.9.3 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (1.2.14)\n", + "Requirement already satisfied: typing-inspect>=0.8.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (0.9.0)\n", + "Requirement already satisfied: pandas in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (1.5.1)\n", + "Requirement already satisfied: dataclasses-json in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (0.6.4)\n", + "Requirement already satisfied: nltk<4.0.0,>=3.8.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (3.8.1)\n", + "Requirement already satisfied: requests>=2.31.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (2.31.0)\n", + "Requirement already satisfied: fsspec>=2023.5.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (2024.2.0)\n", + "Requirement already satisfied: numpy in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (1.23.4)\n", + "Requirement already satisfied: pillow>=9.0.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (10.2.0)\n", + "Requirement already satisfied: nest-asyncio<2.0.0,>=1.5.8 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (1.6.0)\n", + "Requirement already satisfied: networkx>=3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (3.2.1)\n", + 
"Requirement already satisfied: aiohttp<4.0.0,>=3.8.6 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (3.9.3)\n", + "Requirement already satisfied: tqdm<5.0.0,>=4.66.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (4.66.1)\n", + "Requirement already satisfied: typing-extensions>=4.5.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (4.9.0)\n", + "Requirement already satisfied: tiktoken>=0.3.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (0.5.2)\n", + "Requirement already satisfied: dirtyjson<2.0.0,>=1.0.8 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (1.0.8)\n", + "Requirement already satisfied: httpx in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (0.26.0)\n", + "Requirement already satisfied: tenacity<9.0.0,>=8.2.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (8.2.3)\n", + "Requirement already satisfied: llamaindex-py-client<0.2.0,>=0.1.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (0.1.13)\n", + "Requirement already satisfied: PyYAML>=6.0.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (6.0.1)\n", + "Requirement already satisfied: openai>=1.1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.16->llama-index) (1.13.3)\n", + "Requirement already satisfied: pypdf<5.0.0,>=4.0.1 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-readers-file<0.2.0,>=0.1.4->llama-index) (4.1.0)\n", + "Requirement already satisfied: pymupdf<2.0.0,>=1.23.21 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-readers-file<0.2.0,>=0.1.4->llama-index) (1.23.26)\n", + "Requirement already satisfied: beautifulsoup4<5.0.0,>=4.12.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-readers-file<0.2.0,>=0.1.4->llama-index) (4.12.3)\n", + "Requirement already satisfied: bs4<0.0.3,>=0.0.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-readers-file<0.2.0,>=0.1.4->llama-index) (0.0.2)\n", + "Requirement already satisfied: llama-parse<0.4.0,>=0.3.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-readers-llama-parse<0.2.0,>=0.1.2->llama-index) (0.3.7)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.4.1)\n", + "Requirement already satisfied: async-timeout<5.0,>=4.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.16->llama-index) (4.0.3)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.3.1)\n", + "Requirement already satisfied: attrs>=17.3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.16->llama-index) (22.1.0)\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.9.4)\n", + 
"Requirement already satisfied: multidict<7.0,>=4.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.16->llama-index) (6.0.5)\n", + "Requirement already satisfied: soupsieve>1.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from beautifulsoup4<5.0.0,>=4.12.3->llama-index-readers-file<0.2.0,>=0.1.4->llama-index) (2.3.2.post1)\n", + "Requirement already satisfied: wrapt<2,>=1.10 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from deprecated>=1.2.9.3->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.16.0)\n", + "Requirement already satisfied: onnxruntime<2.0.0,>=1.17.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.17.1)\n", + "Requirement already satisfied: tokenizers<0.16.0,>=0.15.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.15.1)\n", + "Requirement already satisfied: chromadb<0.5.0,>=0.4.22 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.4.24)\n", + "Requirement already satisfied: pydantic>=1.10 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llamaindex-py-client<0.2.0,>=0.1.13->llama-index-core<0.11.0,>=0.10.16->llama-index) (2.5.1)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: sniffio in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.3.0)\n", + "Requirement already satisfied: idna in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
httpx->llama-index-core<0.11.0,>=0.10.16->llama-index) (3.4)\n", + "Requirement already satisfied: anyio in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.16->llama-index) (3.7.1)\n", + "Requirement already satisfied: certifi in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.16->llama-index) (2024.2.2)\n", + "Requirement already satisfied: httpcore==1.* in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.0.2)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpcore==1.*->httpx->llama-index-core<0.11.0,>=0.10.16->llama-index) (0.14.0)\n", + "Requirement already satisfied: regex>=2021.8.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.16->llama-index) (2023.12.25)\n", + "Requirement already satisfied: click in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.16->llama-index) (8.1.7)\n", + "Requirement already satisfied: joblib in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.3.2)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from openai>=1.1.0->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.9.0)\n", + "Requirement already satisfied: PyMuPDFb==1.23.22 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pymupdf<2.0.0,>=1.23.21->llama-index-readers-file<0.2.0,>=0.1.4->llama-index) (1.23.22)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
requests>=2.31.0->llama-index-core<0.11.0,>=0.10.16->llama-index) (2.2.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from requests>=2.31.0->llama-index-core<0.11.0,>=0.10.16->llama-index) (3.3.2)\n", + "Requirement already satisfied: greenlet!=0.4.17 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from SQLAlchemy[asyncio]>=1.4.49->llama-index-core<0.11.0,>=0.10.16->llama-index) (3.0.1)\n", + "Requirement already satisfied: mypy-extensions>=0.3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from typing-inspect>=0.8.0->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.0.0)\n", + "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from dataclasses-json->llama-index-core<0.11.0,>=0.10.16->llama-index) (3.20.2)\n", + "Requirement already satisfied: python-dateutil>=2.8.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pandas->llama-index-core<0.11.0,>=0.10.16->llama-index) (2.8.2)\n", + "Requirement already satisfied: pytz>=2020.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pandas->llama-index-core<0.11.0,>=0.10.16->llama-index) (2022.5)\n", + "Requirement already satisfied: exceptiongroup in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from anyio->httpx->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.2.0)\n", + "Requirement already satisfied: opentelemetry-instrumentation-fastapi>=0.41b0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.44b0)\n", + "Requirement already satisfied: overrides>=7.3.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (7.7.0)\n", + "Requirement already satisfied: orjson>=3.9.12 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (3.9.15)\n", + "Requirement already satisfied: fastapi>=0.95.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.104.1)\n", + "Requirement already satisfied: chroma-hnswlib==0.7.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.7.3)\n", + "Requirement already satisfied: uvicorn[standard]>=0.18.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.24.0.post1)\n", + "Requirement already satisfied: kubernetes>=28.1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (29.0.0)\n", + "Requirement already satisfied: opentelemetry-sdk>=1.2.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.23.0)\n", + "Requirement already satisfied: posthog>=2.4.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (3.5.0)\n", + "Requirement already satisfied: build>=1.0.3 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.1.1)\n", + "Requirement already satisfied: importlib-resources in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (6.1.2)\n", + "Requirement already satisfied: typer>=0.9.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.9.0)\n", + "Requirement already satisfied: opentelemetry-exporter-otlp-proto-grpc>=1.2.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.23.0)\n", + "Requirement already satisfied: opentelemetry-api>=1.2.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.23.0)\n", + "Requirement already satisfied: grpcio>=1.58.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.62.1)\n", + "Requirement already satisfied: pypika>=0.48.9 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.48.9)\n", + "Requirement already satisfied: mmh3>=4.0.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (4.1.0)\n", + "Requirement already satisfied: bcrypt>=4.0.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (4.1.2)\n", + "Requirement already satisfied: pulsar-client>=3.1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (3.4.0)\n", + "Requirement already satisfied: packaging>=17.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from marshmallow<4.0.0,>=3.18.0->dataclasses-json->llama-index-core<0.11.0,>=0.10.16->llama-index) (23.2)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: flatbuffers in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from onnxruntime<2.0.0,>=1.17.0->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (23.5.26)\n", + "Requirement already satisfied: protobuf in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from onnxruntime<2.0.0,>=1.17.0->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (4.25.2)\n", + "Requirement already satisfied: coloredlogs in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from onnxruntime<2.0.0,>=1.17.0->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (15.0.1)\n", + "Requirement already satisfied: sympy in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from onnxruntime<2.0.0,>=1.17.0->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.12)\n", + "Requirement already satisfied: 
annotated-types>=0.4.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pydantic>=1.10->llamaindex-py-client<0.2.0,>=0.1.13->llama-index-core<0.11.0,>=0.10.16->llama-index) (0.6.0)\n", + "Requirement already satisfied: pydantic-core==2.14.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pydantic>=1.10->llamaindex-py-client<0.2.0,>=0.1.13->llama-index-core<0.11.0,>=0.10.16->llama-index) (2.14.3)\n", + "Requirement already satisfied: six>=1.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from python-dateutil>=2.8.1->pandas->llama-index-core<0.11.0,>=0.10.16->llama-index) (1.16.0)\n", + "Requirement already satisfied: huggingface_hub<1.0,>=0.16.4 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from tokenizers<0.16.0,>=0.15.1->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.20.3)\n", + "Requirement already satisfied: tomli>=1.1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from build>=1.0.3->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (2.0.1)\n", + "Requirement already satisfied: pyproject_hooks in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from build>=1.0.3->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.0.0)\n", + "Requirement already satisfied: starlette<0.28.0,>=0.27.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from fastapi>=0.95.2->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.27.0)\n", + "Requirement already satisfied: filelock in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
huggingface_hub<1.0,>=0.16.4->tokenizers<0.16.0,>=0.15.1->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (3.13.1)\n", + "Requirement already satisfied: google-auth>=1.0.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (2.28.1)\n", + "Requirement already satisfied: websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.4.1)\n", + "Requirement already satisfied: requests-oauthlib in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.3.1)\n", + "Requirement already satisfied: oauthlib>=3.2.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (3.2.2)\n", + "Requirement already satisfied: importlib-metadata<7.0,>=6.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from opentelemetry-api>=1.2.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (6.11.0)\n", + "Requirement already satisfied: opentelemetry-exporter-otlp-proto-common==1.23.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from opentelemetry-exporter-otlp-proto-grpc>=1.2.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.23.0)\n", + "Requirement already satisfied: opentelemetry-proto==1.23.0 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from opentelemetry-exporter-otlp-proto-grpc>=1.2.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.23.0)\n", + "Requirement already satisfied: googleapis-common-protos~=1.52 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from opentelemetry-exporter-otlp-proto-grpc>=1.2.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.62.0)\n", + "Requirement already satisfied: opentelemetry-semantic-conventions==0.44b0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from opentelemetry-instrumentation-fastapi>=0.41b0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.44b0)\n", + "Requirement already satisfied: opentelemetry-util-http==0.44b0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from opentelemetry-instrumentation-fastapi>=0.41b0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.44b0)\n", + "Requirement already satisfied: opentelemetry-instrumentation-asgi==0.44b0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from opentelemetry-instrumentation-fastapi>=0.41b0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.44b0)\n", + "Requirement already satisfied: opentelemetry-instrumentation==0.44b0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from opentelemetry-instrumentation-fastapi>=0.41b0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.44b0)\n", + "Requirement already satisfied: setuptools>=16.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
opentelemetry-instrumentation==0.44b0->opentelemetry-instrumentation-fastapi>=0.41b0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (58.1.0)\n", + "Requirement already satisfied: asgiref~=3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from opentelemetry-instrumentation-asgi==0.44b0->opentelemetry-instrumentation-fastapi>=0.41b0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (3.7.2)\n", + "Requirement already satisfied: backoff>=1.10.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from posthog>=2.4.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (2.2.1)\n", + "Requirement already satisfied: monotonic>=1.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from posthog>=2.4.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.6)\n", + "Requirement already satisfied: watchfiles>=0.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from uvicorn[standard]>=0.18.3->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.21.0)\n", + "Requirement already satisfied: uvloop!=0.15.0,!=0.15.1,>=0.14.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from uvicorn[standard]>=0.18.3->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.19.0)\n", + "Requirement already satisfied: httptools>=0.5.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from uvicorn[standard]>=0.18.3->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.6.1)\n", + "Requirement already satisfied: 
python-dotenv>=0.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from uvicorn[standard]>=0.18.3->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.0.1)\n", + "Requirement already satisfied: websockets>=10.4 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from uvicorn[standard]>=0.18.3->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (12.0)\n", + "Requirement already satisfied: humanfriendly>=9.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from coloredlogs->onnxruntime<2.0.0,>=1.17.0->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (10.0)\n", + "Requirement already satisfied: mpmath>=0.19 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from sympy->onnxruntime<2.0.0,>=1.17.0->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (1.3.0)\n", + "Requirement already satisfied: rsa<5,>=3.1.4 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from google-auth>=1.0.1->kubernetes>=28.1.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (4.9)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: cachetools<6.0,>=2.0.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from google-auth>=1.0.1->kubernetes>=28.1.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (5.3.2)\n", + "Requirement already satisfied: pyasn1-modules>=0.2.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
google-auth>=1.0.1->kubernetes>=28.1.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.3.0)\n", + "Requirement already satisfied: zipp>=0.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from importlib-metadata<7.0,>=6.0->opentelemetry-api>=1.2.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (3.17.0)\n", + "Requirement already satisfied: pyasn1<0.6.0,>=0.4.6 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pyasn1-modules>=0.2.1->google-auth>=1.0.1->kubernetes>=28.1.0->chromadb<0.5.0,>=0.4.22->llama-index-vector-stores-chroma<0.2.0,>=0.1.1->llama-index-cli<0.2.0,>=0.1.2->llama-index) (0.5.1)\n", + "\u001b[33mWARNING: You are using pip version 22.0.4; however, version 24.0 is available.\n", + "You should consider upgrading via the '/Users/daniel/.pyenv/versions/3.10.3/bin/python3.10 -m pip install --upgrade pip' command.\u001b[0m\u001b[33m\n", + "\u001b[0mRequirement already satisfied: llama-index-llms-groq in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (0.1.3)\n", + "Requirement already satisfied: llama-index-core<0.11.0,>=0.10.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-llms-groq) (0.10.16.post1)\n", + "Requirement already satisfied: llama-index-llms-openai-like<0.2.0,>=0.1.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-llms-groq) (0.1.3)\n", + "Requirement already satisfied: openai>=1.1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.13.3)\n", + "Requirement already satisfied: requests>=2.31.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2.31.0)\n", + "Requirement already satisfied: 
SQLAlchemy[asyncio]>=1.4.49 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2.0.25)\n", + "Requirement already satisfied: dataclasses-json in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (0.6.4)\n", + "Requirement already satisfied: pillow>=9.0.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (10.2.0)\n", + "Requirement already satisfied: aiohttp<4.0.0,>=3.8.6 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (3.9.3)\n", + "Requirement already satisfied: httpx in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (0.26.0)\n", + "Requirement already satisfied: numpy in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.23.4)\n", + "Requirement already satisfied: pandas in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.5.1)\n", + "Requirement already satisfied: deprecated>=1.2.9.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.2.14)\n", + "Requirement already satisfied: tenacity<9.0.0,>=8.2.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (8.2.3)\n", + "Requirement already satisfied: tqdm<5.0.0,>=4.66.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (4.66.1)\n", + "Requirement already satisfied: nltk<4.0.0,>=3.8.1 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (3.8.1)\n", + "Requirement already satisfied: fsspec>=2023.5.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2024.2.0)\n", + "Requirement already satisfied: llamaindex-py-client<0.2.0,>=0.1.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (0.1.13)\n", + "Requirement already satisfied: networkx>=3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (3.2.1)\n", + "Requirement already satisfied: dirtyjson<2.0.0,>=1.0.8 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.0.8)\n", + "Requirement already satisfied: tiktoken>=0.3.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (0.5.2)\n", + "Requirement already satisfied: nest-asyncio<2.0.0,>=1.5.8 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.6.0)\n", + "Requirement already satisfied: PyYAML>=6.0.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (6.0.1)\n", + "Requirement already satisfied: typing-inspect>=0.8.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (0.9.0)\n", + "Requirement already satisfied: typing-extensions>=4.5.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (4.9.0)\n", + "Requirement already satisfied: 
transformers<5.0.0,>=4.37.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-llms-openai-like<0.2.0,>=0.1.3->llama-index-llms-groq) (4.37.2)\n", + "Requirement already satisfied: llama-index-llms-openai<0.2.0,>=0.1.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-llms-openai-like<0.2.0,>=0.1.3->llama-index-llms-groq) (0.1.7)\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.9.4)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (6.0.5)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.3.1)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.4.1)\n", + "Requirement already satisfied: async-timeout<5.0,>=4.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (4.0.3)\n", + "Requirement already satisfied: attrs>=17.3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (22.1.0)\n", + "Requirement already satisfied: wrapt<2,>=1.10 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from deprecated>=1.2.9.3->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.16.0)\n", + "Requirement already satisfied: pydantic>=1.10 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llamaindex-py-client<0.2.0,>=0.1.13->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2.5.1)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: anyio in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (3.7.1)\n", + "Requirement already satisfied: idna in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (3.4)\n", + "Requirement already satisfied: certifi in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2024.2.2)\n", + "Requirement already satisfied: httpcore==1.* in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.0.2)\n", + "Requirement already satisfied: sniffio in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.3.0)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpcore==1.*->httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (0.14.0)\n", + "Requirement already satisfied: joblib in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.3.2)\n", + "Requirement already satisfied: click in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (8.1.7)\n", + "Requirement already satisfied: regex>=2021.8.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2023.12.25)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from openai>=1.1.0->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.9.0)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from requests>=2.31.0->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2.2.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from requests>=2.31.0->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (3.3.2)\n", + "Requirement already satisfied: greenlet!=0.4.17 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from SQLAlchemy[asyncio]>=1.4.49->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (3.0.1)\n", + "Requirement already satisfied: safetensors>=0.4.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from transformers<5.0.0,>=4.37.0->llama-index-llms-openai-like<0.2.0,>=0.1.3->llama-index-llms-groq) (0.4.2)\n", + "Requirement already satisfied: huggingface-hub<1.0,>=0.19.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from transformers<5.0.0,>=4.37.0->llama-index-llms-openai-like<0.2.0,>=0.1.3->llama-index-llms-groq) (0.20.3)\n", + "Requirement already satisfied: packaging>=20.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from transformers<5.0.0,>=4.37.0->llama-index-llms-openai-like<0.2.0,>=0.1.3->llama-index-llms-groq) (23.2)\n", + "Requirement already satisfied: tokenizers<0.19,>=0.14 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from transformers<5.0.0,>=4.37.0->llama-index-llms-openai-like<0.2.0,>=0.1.3->llama-index-llms-groq) (0.15.1)\n", + "Requirement already satisfied: filelock in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from transformers<5.0.0,>=4.37.0->llama-index-llms-openai-like<0.2.0,>=0.1.3->llama-index-llms-groq) (3.13.1)\n", + "Requirement already satisfied: mypy-extensions>=0.3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from typing-inspect>=0.8.0->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.0.0)\n", + "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from dataclasses-json->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (3.20.2)\n", + "Requirement already satisfied: pytz>=2020.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pandas->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2022.5)\n", + "Requirement already satisfied: python-dateutil>=2.8.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pandas->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2.8.2)\n", + "Requirement already satisfied: exceptiongroup in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from anyio->httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.2.0)\n", + "Requirement already satisfied: annotated-types>=0.4.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pydantic>=1.10->llamaindex-py-client<0.2.0,>=0.1.13->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (0.6.0)\n", + "Requirement already satisfied: pydantic-core==2.14.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pydantic>=1.10->llamaindex-py-client<0.2.0,>=0.1.13->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (2.14.3)\n", + "Requirement already satisfied: six>=1.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from python-dateutil>=2.8.1->pandas->llama-index-core<0.11.0,>=0.10.1->llama-index-llms-groq) (1.16.0)\n", + "\u001b[33mWARNING: You 
are using pip version 22.0.4; however, version 24.0 is available.\n", + "You should consider upgrading via the '/Users/daniel/.pyenv/versions/3.10.3/bin/python3.10 -m pip install --upgrade pip' command.\u001b[0m\u001b[33m\n", + "\u001b[0mRequirement already satisfied: llama-index-embeddings-huggingface in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (0.2.0)\n", + "Requirement already satisfied: sentence-transformers<3.0.0,>=2.6.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-embeddings-huggingface) (2.7.0)\n", + "Requirement already satisfied: huggingface-hub[inference]>=0.19.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-embeddings-huggingface) (0.20.3)\n", + "Requirement already satisfied: llama-index-core<0.11.0,>=0.10.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-embeddings-huggingface) (0.10.16.post1)\n", + "Requirement already satisfied: packaging>=20.9 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (23.2)\n", + "Requirement already satisfied: requests in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (2.31.0)\n", + "Requirement already satisfied: typing-extensions>=3.7.4.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (4.9.0)\n", + "Requirement already satisfied: fsspec>=2023.5.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (2024.2.0)\n", + "Requirement already satisfied: filelock in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (3.13.1)\n", + "Requirement already satisfied: pyyaml>=5.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (6.0.1)\n", + "Requirement already satisfied: tqdm>=4.42.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (4.66.1)\n", + "Requirement already satisfied: aiohttp in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (3.9.3)\n", + "Requirement already satisfied: pydantic<3.0,>1.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (2.5.1)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: nest-asyncio<2.0.0,>=1.5.8 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.6.0)\n", + "Requirement already satisfied: tiktoken>=0.3.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (0.5.2)\n", + "Requirement already satisfied: SQLAlchemy[asyncio]>=1.4.49 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (2.0.25)\n", + "Requirement already satisfied: typing-inspect>=0.8.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (0.9.0)\n", + "Requirement already satisfied: deprecated>=1.2.9.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.2.14)\n", + "Requirement already satisfied: pandas in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.5.1)\n", + "Requirement already satisfied: llamaindex-py-client<0.2.0,>=0.1.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (0.1.13)\n", + "Requirement already satisfied: nltk<4.0.0,>=3.8.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (3.8.1)\n", + "Requirement already satisfied: tenacity<9.0.0,>=8.2.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (8.2.3)\n", + "Requirement already satisfied: dirtyjson<2.0.0,>=1.0.8 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.0.8)\n", + "Requirement already satisfied: numpy in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.23.4)\n", + "Requirement already satisfied: pillow>=9.0.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (10.2.0)\n", + "Requirement already satisfied: dataclasses-json in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (0.6.4)\n", + "Requirement already satisfied: networkx>=3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (3.2.1)\n", + "Requirement already satisfied: 
httpx in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (0.26.0)\n", + "Requirement already satisfied: openai>=1.1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.13.3)\n", + "Requirement already satisfied: scipy in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (1.12.0)\n", + "Requirement already satisfied: torch>=1.11.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (2.2.0)\n", + "Requirement already satisfied: transformers<5.0.0,>=4.34.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (4.37.2)\n", + "Requirement already satisfied: scikit-learn in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (1.4.0)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (6.0.5)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (1.3.1)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (1.4.1)\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
aiohttp->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (1.9.4)\n", + "Requirement already satisfied: attrs>=17.3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (22.1.0)\n", + "Requirement already satisfied: async-timeout<5.0,>=4.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (4.0.3)\n", + "Requirement already satisfied: wrapt<2,>=1.10 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from deprecated>=1.2.9.3->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.16.0)\n", + "Requirement already satisfied: certifi in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (2024.2.2)\n", + "Requirement already satisfied: idna in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (3.4)\n", + "Requirement already satisfied: httpcore==1.* in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.0.2)\n", + "Requirement already satisfied: anyio in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (3.7.1)\n", + "Requirement already satisfied: sniffio in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.3.0)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
httpcore==1.*->httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (0.14.0)\n", + "Requirement already satisfied: click in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (8.1.7)\n", + "Requirement already satisfied: joblib in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.3.2)\n", + "Requirement already satisfied: regex>=2021.8.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (2023.12.25)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from openai>=1.1.0->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.9.0)\n", + "Requirement already satisfied: pydantic-core==2.14.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pydantic<3.0,>1.1->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (2.14.3)\n", + "Requirement already satisfied: annotated-types>=0.4.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pydantic<3.0,>1.1->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (0.6.0)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from requests->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (2.2.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from requests->huggingface-hub[inference]>=0.19.0->llama-index-embeddings-huggingface) (3.3.2)\n", + "Requirement already satisfied: greenlet!=0.4.17 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from SQLAlchemy[asyncio]>=1.4.49->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (3.0.1)\n", + "Requirement already satisfied: sympy in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from torch>=1.11.0->sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (1.12)\n", + "Requirement already satisfied: jinja2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from torch>=1.11.0->sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (3.1.2)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: safetensors>=0.4.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from transformers<5.0.0,>=4.34.0->sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (0.4.2)\n", + "Requirement already satisfied: tokenizers<0.19,>=0.14 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from transformers<5.0.0,>=4.34.0->sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (0.15.1)\n", + "Requirement already satisfied: mypy-extensions>=0.3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from typing-inspect>=0.8.0->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.0.0)\n", + "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from dataclasses-json->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (3.20.2)\n", + "Requirement already satisfied: python-dateutil>=2.8.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pandas->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (2.8.2)\n", + "Requirement already satisfied: pytz>=2020.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from 
pandas->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (2022.5)\n", + "Requirement already satisfied: threadpoolctl>=2.0.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from scikit-learn->sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (3.2.0)\n", + "Requirement already satisfied: exceptiongroup in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from anyio->httpx->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.2.0)\n", + "Requirement already satisfied: six>=1.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from python-dateutil>=2.8.1->pandas->llama-index-core<0.11.0,>=0.10.1->llama-index-embeddings-huggingface) (1.16.0)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from jinja2->torch>=1.11.0->sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (2.1.1)\n", + "Requirement already satisfied: mpmath>=0.19 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from sympy->torch>=1.11.0->sentence-transformers<3.0.0,>=2.6.1->llama-index-embeddings-huggingface) (1.3.0)\n", + "\u001b[33mWARNING: You are using pip version 22.0.4; however, version 24.0 is available.\n", + "You should consider upgrading via the '/Users/daniel/.pyenv/versions/3.10.3/bin/python3.10 -m pip install --upgrade pip' command.\u001b[0m\u001b[33m\n", + "\u001b[0mRequirement already satisfied: llama-parse in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (0.3.7)\n", + "Requirement already satisfied: llama-index-core>=0.10.7 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-parse) (0.10.16.post1)\n", + "Requirement already satisfied: SQLAlchemy[asyncio]>=1.4.49 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (2.0.25)\n", + "Requirement 
already satisfied: httpx in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (0.26.0)\n", + "Requirement already satisfied: nest-asyncio<2.0.0,>=1.5.8 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (1.6.0)\n", + "Requirement already satisfied: aiohttp<4.0.0,>=3.8.6 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (3.9.3)\n", + "Requirement already satisfied: typing-inspect>=0.8.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (0.9.0)\n", + "Requirement already satisfied: tenacity<9.0.0,>=8.2.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (8.2.3)\n", + "Requirement already satisfied: networkx>=3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (3.2.1)\n", + "Requirement already satisfied: dirtyjson<2.0.0,>=1.0.8 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (1.0.8)\n", + "Requirement already satisfied: pandas in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (1.5.1)\n", + "Requirement already satisfied: openai>=1.1.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (1.13.3)\n", + "Requirement already satisfied: pillow>=9.0.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (10.2.0)\n", + "Requirement already satisfied: llamaindex-py-client<0.2.0,>=0.1.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (0.1.13)\n", + "Requirement already satisfied: 
nltk<4.0.0,>=3.8.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (3.8.1)\n", + "Requirement already satisfied: fsspec>=2023.5.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (2024.2.0)\n", + "Requirement already satisfied: PyYAML>=6.0.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (6.0.1)\n", + "Requirement already satisfied: deprecated>=1.2.9.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (1.2.14)\n", + "Requirement already satisfied: tiktoken>=0.3.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (0.5.2)\n", + "Requirement already satisfied: typing-extensions>=4.5.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (4.9.0)\n", + "Requirement already satisfied: numpy in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (1.23.4)\n", + "Requirement already satisfied: requests>=2.31.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (2.31.0)\n", + "Requirement already satisfied: dataclasses-json in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (0.6.4)\n", + "Requirement already satisfied: tqdm<5.0.0,>=4.66.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llama-index-core>=0.10.7->llama-parse) (4.66.1)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.10.7->llama-parse) (6.0.5)\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.10.7->llama-parse) (1.9.4)\n", + "Requirement already satisfied: async-timeout<5.0,>=4.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.10.7->llama-parse) (4.0.3)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.10.7->llama-parse) (1.3.1)\n", + "Requirement already satisfied: attrs>=17.3.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.10.7->llama-parse) (22.1.0)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.6->llama-index-core>=0.10.7->llama-parse) (1.4.1)\n", + "Requirement already satisfied: wrapt<2,>=1.10 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from deprecated>=1.2.9.3->llama-index-core>=0.10.7->llama-parse) (1.16.0)\n", + "Requirement already satisfied: pydantic>=1.10 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from llamaindex-py-client<0.2.0,>=0.1.13->llama-index-core>=0.10.7->llama-parse) (2.5.1)\n", + "Requirement already satisfied: idna in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core>=0.10.7->llama-parse) (3.4)\n", + "Requirement already satisfied: anyio in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core>=0.10.7->llama-parse) (3.7.1)\n", + "Requirement already satisfied: httpcore==1.* in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core>=0.10.7->llama-parse) (1.0.2)\n", + "Requirement already satisfied: sniffio in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages 
(from httpx->llama-index-core>=0.10.7->llama-parse) (1.3.0)\n", + "Requirement already satisfied: certifi in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpx->llama-index-core>=0.10.7->llama-parse) (2024.2.2)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from httpcore==1.*->httpx->llama-index-core>=0.10.7->llama-parse) (0.14.0)\n", + "Requirement already satisfied: regex>=2021.8.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core>=0.10.7->llama-parse) (2023.12.25)\n", + "Requirement already satisfied: joblib in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core>=0.10.7->llama-parse) (1.3.2)\n", + "Requirement already satisfied: click in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from nltk<4.0.0,>=3.8.1->llama-index-core>=0.10.7->llama-parse) (8.1.7)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from openai>=1.1.0->llama-index-core>=0.10.7->llama-parse) (1.9.0)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from requests>=2.31.0->llama-index-core>=0.10.7->llama-parse) (3.3.2)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from requests>=2.31.0->llama-index-core>=0.10.7->llama-parse) (2.2.0)\n", + "Requirement already satisfied: greenlet!=0.4.17 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from SQLAlchemy[asyncio]>=1.4.49->llama-index-core>=0.10.7->llama-parse) (3.0.1)\n", + "Requirement already satisfied: mypy-extensions>=0.3.0 in 
/Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from typing-inspect>=0.8.0->llama-index-core>=0.10.7->llama-parse) (1.0.0)\n", + "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from dataclasses-json->llama-index-core>=0.10.7->llama-parse) (3.20.2)\n", + "Requirement already satisfied: pytz>=2020.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pandas->llama-index-core>=0.10.7->llama-parse) (2022.5)\n", + "Requirement already satisfied: python-dateutil>=2.8.1 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pandas->llama-index-core>=0.10.7->llama-parse) (2.8.2)\n", + "Requirement already satisfied: exceptiongroup in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from anyio->httpx->llama-index-core>=0.10.7->llama-parse) (1.2.0)\n", + "Requirement already satisfied: packaging>=17.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from marshmallow<4.0.0,>=3.18.0->dataclasses-json->llama-index-core>=0.10.7->llama-parse) (23.2)\n", + "Requirement already satisfied: annotated-types>=0.4.0 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pydantic>=1.10->llamaindex-py-client<0.2.0,>=0.1.13->llama-index-core>=0.10.7->llama-parse) (0.6.0)\n", + "Requirement already satisfied: pydantic-core==2.14.3 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from pydantic>=1.10->llamaindex-py-client<0.2.0,>=0.1.13->llama-index-core>=0.10.7->llama-parse) (2.14.3)\n", + "Requirement already satisfied: six>=1.5 in /Users/daniel/.pyenv/versions/3.10.3/lib/python3.10/site-packages (from python-dateutil>=2.8.1->pandas->llama-index-core>=0.10.7->llama-parse) (1.16.0)\n", + "\u001b[33mWARNING: You are using pip version 22.0.4; however, version 24.0 is available.\n", + "You should consider upgrading via the '/Users/daniel/.pyenv/versions/3.10.3/bin/python3.10 
-m pip install --upgrade pip' command.\u001b[0m\u001b[33m\n", + "\u001b[0m" + ] + } + ], + "source": [ + "!pip install llama-index\n", + "!pip install llama-index-llms-groq\n", + "!pip install llama-index-embeddings-huggingface\n", + "!pip install llama-parse" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "641fa5c8-d63e-47f8-b5bc-ebf994f6e314", + "metadata": {}, + "outputs": [], + "source": [ + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()" + ] + }, + { + "cell_type": "markdown", + "id": "1714ea83-6cd4-44bb-b53f-4499126c3809", + "metadata": {}, + "source": [ + "### Setup LLM using Groq\n", + "\n", + "To use [Groq](https://groq.com), you need to make sure that `GROQ_API_KEY` is specified as an environment variable." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "5d46440c", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "os.environ[\"GROQ_API_KEY\"] = \"<YOUR_GROQ_API_KEY>\"" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "d5256970-eba4-499a-b438-8766a290a61a", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.llms.groq import Groq\n", + "\n", + "llm = Groq(model=\"llama3-8b-8192\")\n", + "llm_70b = Groq(model=\"llama3-70b-8192\")" + ] + }, + { + "cell_type": "markdown", + "id": "41c3f154-d345-465d-8eed-63b99adbd3ca", + "metadata": {}, + "source": [ + "### Setup Embedding Model" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "0cda736d-e414-44e3-8c15-6be49f5f0282", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.embeddings.huggingface import HuggingFaceEmbedding\n", + "\n", + "embed_model = HuggingFaceEmbedding(model_name=\"BAAI/bge-small-en-v1.5\")" + ] + }, + { + "cell_type": "markdown", + "id": "3625cf29-7c56-475a-8efd-fbe8ffce194d", + "metadata": {}, + "source": [ + "### Define Global Settings Configuration\n", + "\n", + "In LlamaIndex, you can define global
settings so you don't have to pass the LLM / embedding model objects everywhere." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "be3565d1-cc5b-4149-ad5a-7be8f7818e0c", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import Settings\n", + "\n", + "Settings.llm = llm\n", + "Settings.embed_model = embed_model" + ] + }, + { + "cell_type": "markdown", + "id": "42449b68-47f5-40cf-9207-191307b25e8e", + "metadata": {}, + "source": [ + "### Download Data\n", + "\n", + "Here you'll download data that's used in section 2 and onwards.\n", + "\n", + "We'll download some articles on Kendrick, Drake, and their beef (as of May 2024)." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "59b18640-cdfa-42c1-ab53-115983c1fdc4", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "mkdir: data: File exists\n", + "--2024-05-20 09:27:56-- https://www.dropbox.com/scl/fi/t1soxfjdp0v44an6sdymd/drake_kendrick_beef.pdf?rlkey=u9546ymb7fj8lk2v64r6p5r5k&st=wjzzrgil&dl=1\n", + "Resolving www.dropbox.com (www.dropbox.com)... 2620:100:6019:18::a27d:412, 162.125.4.18\n", + "Connecting to www.dropbox.com (www.dropbox.com)|2620:100:6019:18::a27d:412|:443... connected.\n", + "HTTP request sent, awaiting response... 302 Found\n", + "Location: https://uc4425830a1d2d4c42bbf6c89b7f.dl.dropboxusercontent.com/cd/0/inline/CTQhAFm1iI5gNTeE_NytPzfcLl6Ilp9PSwNsVHJg7h_C2mUfnd6DL__txef3V5PoEV68APiuzt1UaHr4GVFHs-iYtSYqNJ9YT-chZyGn5GTRT837J92mPPDHpPnxibg3FCE/file?dl=1# [following]\n", + "--2024-05-20 09:27:57-- https://uc4425830a1d2d4c42bbf6c89b7f.dl.dropboxusercontent.com/cd/0/inline/CTQhAFm1iI5gNTeE_NytPzfcLl6Ilp9PSwNsVHJg7h_C2mUfnd6DL__txef3V5PoEV68APiuzt1UaHr4GVFHs-iYtSYqNJ9YT-chZyGn5GTRT837J92mPPDHpPnxibg3FCE/file?dl=1\n", + "Resolving uc4425830a1d2d4c42bbf6c89b7f.dl.dropboxusercontent.com (uc4425830a1d2d4c42bbf6c89b7f.dl.dropboxusercontent.com)... 
2620:100:6019:15::a27d:40f, 162.125.4.15\n", + "Connecting to uc4425830a1d2d4c42bbf6c89b7f.dl.dropboxusercontent.com (uc4425830a1d2d4c42bbf6c89b7f.dl.dropboxusercontent.com)|2620:100:6019:15::a27d:40f|:443... connected.\n", + "HTTP request sent, awaiting response... 302 Found\n", + "Location: /cd/0/inline2/CTTKkMZQK-Fk13zt0Wc04FPhWEZ2Mfy-DhMgx4k3kmgqTZFkhDUieUVZNJ5S9fESwn1XTt68Cm6-T9FuNDFxv0SE7JN8WtpJJaZHbV4EfVkffGctU9aiy7m_xfo8OViwDmMo3PeRerVdwDilsblJLH0Z9_eeVicSjRCQh03eeybgZZr_zzF6ydj5V9evnXEhVp0CmBs-DfNL3s-AbIZ4nYwFLmrufsyw17rSqLDDmbIUQxV349HByliOgJqdZ-C-gH0-MaBSnIa3g88T8RvxAzyrdNpEdJoEvCVqOYdl2JtKleQYxuR4XO4EHxJWTwNj735jMjHf1rQVkRcSx71MYrL-YSkvVYQBhoCUwxJoNIvaeg/file?dl=1 [following]\n", + "--2024-05-20 09:27:58-- https://uc4425830a1d2d4c42bbf6c89b7f.dl.dropboxusercontent.com/cd/0/inline2/CTTKkMZQK-Fk13zt0Wc04FPhWEZ2Mfy-DhMgx4k3kmgqTZFkhDUieUVZNJ5S9fESwn1XTt68Cm6-T9FuNDFxv0SE7JN8WtpJJaZHbV4EfVkffGctU9aiy7m_xfo8OViwDmMo3PeRerVdwDilsblJLH0Z9_eeVicSjRCQh03eeybgZZr_zzF6ydj5V9evnXEhVp0CmBs-DfNL3s-AbIZ4nYwFLmrufsyw17rSqLDDmbIUQxV349HByliOgJqdZ-C-gH0-MaBSnIa3g88T8RvxAzyrdNpEdJoEvCVqOYdl2JtKleQYxuR4XO4EHxJWTwNj735jMjHf1rQVkRcSx71MYrL-YSkvVYQBhoCUwxJoNIvaeg/file?dl=1\n", + "Reusing existing connection to [uc4425830a1d2d4c42bbf6c89b7f.dl.dropboxusercontent.com]:443.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 49318627 (47M) [application/binary]\n", + "Saving to: ‘data/drake_kendrick_beef.pdf’\n", + "\n", + "data/drake_kendrick 100%[===================>] 47.03M 32.9MB/s in 1.4s \n", + "\n", + "2024-05-20 09:28:00 (32.9 MB/s) - ‘data/drake_kendrick_beef.pdf’ saved [49318627/49318627]\n", + "\n", + "--2024-05-20 09:28:00-- https://www.dropbox.com/scl/fi/nts3n64s6kymner2jppd6/drake.pdf?rlkey=hksirpqwzlzqoejn55zemk6ld&st=mohyfyh4&dl=1\n", + "Resolving www.dropbox.com (www.dropbox.com)... 2620:100:6019:18::a27d:412, 162.125.4.18\n", + "Connecting to www.dropbox.com (www.dropbox.com)|2620:100:6019:18::a27d:412|:443... 
connected.\n", + "HTTP request sent, awaiting response... 302 Found\n", + "Location: https://uc306cc6b72bb0c6b4807adfbf69.dl.dropboxusercontent.com/cd/0/inline/CTTKsxu4SC50fGZs5aEVnvyeCyoCcebsEJLbgiKc-zs4xz7qUrHw3KfJmFvC3LCbaD1qeP5FE5Z_irFNBzYG-4Nbr3sR0f4AY7GrHUOtSMzmtVCS1G2okbjCLLOoj8Urdkw/file?dl=1# [following]\n", + "--2024-05-20 09:28:01-- https://uc306cc6b72bb0c6b4807adfbf69.dl.dropboxusercontent.com/cd/0/inline/CTTKsxu4SC50fGZs5aEVnvyeCyoCcebsEJLbgiKc-zs4xz7qUrHw3KfJmFvC3LCbaD1qeP5FE5Z_irFNBzYG-4Nbr3sR0f4AY7GrHUOtSMzmtVCS1G2okbjCLLOoj8Urdkw/file?dl=1\n", + "Resolving uc306cc6b72bb0c6b4807adfbf69.dl.dropboxusercontent.com (uc306cc6b72bb0c6b4807adfbf69.dl.dropboxusercontent.com)... 2620:100:6019:15::a27d:40f, 162.125.4.15\n", + "Connecting to uc306cc6b72bb0c6b4807adfbf69.dl.dropboxusercontent.com (uc306cc6b72bb0c6b4807adfbf69.dl.dropboxusercontent.com)|2620:100:6019:15::a27d:40f|:443... connected.\n", + "HTTP request sent, awaiting response... 302 Found\n", + "Location: /cd/0/inline2/CTQv1f9QtlDimE_MTAN-OEDn6BGT9UTJ8QjgwkGGhcWJN5O_F7cNTeAlo6ThMraOXNh9P9ENA-IS08GWOU9Pu1cQPyxsjiT8o0_KZRwsjrPam9a_bZ0uydRciFz3i6PRI8EwAAAHD7V-XibNLg9uv5b_-jKxg6SXmIMuN7ZUItSKxKyhfg0YF0UeOp7BgEnjabJIfXTFSD0y4_Kvnl3_isvMbBUZ6os7vOsnjjgN2eLGNHVnfEdbSlBSw1cGsXA1ZRwR3NwF05BIZT-Lsgspw8TPN4updOfgCXsSERWFHDmiKLozDCU3UPWh1QAEVTct9mW3vRHIGQ7i8xr1nO7h8lR_VSMJ-C9Ep40O2rjeEGbKEQ/file?dl=1 [following]\n", + "--2024-05-20 09:28:01-- https://uc306cc6b72bb0c6b4807adfbf69.dl.dropboxusercontent.com/cd/0/inline2/CTQv1f9QtlDimE_MTAN-OEDn6BGT9UTJ8QjgwkGGhcWJN5O_F7cNTeAlo6ThMraOXNh9P9ENA-IS08GWOU9Pu1cQPyxsjiT8o0_KZRwsjrPam9a_bZ0uydRciFz3i6PRI8EwAAAHD7V-XibNLg9uv5b_-jKxg6SXmIMuN7ZUItSKxKyhfg0YF0UeOp7BgEnjabJIfXTFSD0y4_Kvnl3_isvMbBUZ6os7vOsnjjgN2eLGNHVnfEdbSlBSw1cGsXA1ZRwR3NwF05BIZT-Lsgspw8TPN4updOfgCXsSERWFHDmiKLozDCU3UPWh1QAEVTct9mW3vRHIGQ7i8xr1nO7h8lR_VSMJ-C9Ep40O2rjeEGbKEQ/file?dl=1\n", + "Reusing existing connection to [uc306cc6b72bb0c6b4807adfbf69.dl.dropboxusercontent.com]:443.\n", + "HTTP request 
sent, awaiting response... 200 OK\n", + "Length: 4590973 (4.4M) [application/binary]\n", + "Saving to: ‘data/drake.pdf’\n", + "\n", + "data/drake.pdf 100%[===================>] 4.38M 12.0MB/s in 0.4s \n", + "\n", + "2024-05-20 09:28:02 (12.0 MB/s) - ‘data/drake.pdf’ saved [4590973/4590973]\n", + "\n", + "--2024-05-20 09:28:02-- https://www.dropbox.com/scl/fi/8ax2vnoebhmy44bes2n1d/kendrick.pdf?rlkey=fhxvn94t5amdqcv9vshifd3hj&st=dxdtytn6&dl=1\n", + "Resolving www.dropbox.com (www.dropbox.com)... 2620:100:6019:18::a27d:412, 162.125.4.18\n", + "Connecting to www.dropbox.com (www.dropbox.com)|2620:100:6019:18::a27d:412|:443... connected.\n", + "HTTP request sent, awaiting response... 302 Found\n", + "Location: https://uc3ad47fc720b85fdd36566e9669.dl.dropboxusercontent.com/cd/0/inline/CTS6obqeEm8Mzu1a_hWd2GmLrYndc7ctcFK1-6-yM2PPXFyvOsoe9OFDf2ZbCA-mE-19OCycTm4OD8D47idzH09Lf-M501waiDDcEDejhhFjgJr5wABuD4FV4kKtLgecZhI/file?dl=1# [following]\n", + "--2024-05-20 09:28:03-- https://uc3ad47fc720b85fdd36566e9669.dl.dropboxusercontent.com/cd/0/inline/CTS6obqeEm8Mzu1a_hWd2GmLrYndc7ctcFK1-6-yM2PPXFyvOsoe9OFDf2ZbCA-mE-19OCycTm4OD8D47idzH09Lf-M501waiDDcEDejhhFjgJr5wABuD4FV4kKtLgecZhI/file?dl=1\n", + "Resolving uc3ad47fc720b85fdd36566e9669.dl.dropboxusercontent.com (uc3ad47fc720b85fdd36566e9669.dl.dropboxusercontent.com)... 2620:100:6019:15::a27d:40f, 162.125.4.15\n", + "Connecting to uc3ad47fc720b85fdd36566e9669.dl.dropboxusercontent.com (uc3ad47fc720b85fdd36566e9669.dl.dropboxusercontent.com)|2620:100:6019:15::a27d:40f|:443... connected.\n", + "HTTP request sent, awaiting response... 
302 Found\n", + "Location: /cd/0/inline2/CTQNM0rmZNvzX5Lwg1iXBmqIz4EJ2ZhyZOITdANOekmgSe03MihquuCWfGxT8LH24oZNn9uwX1HUqaRF2BHUzBsQEiTEvONnVsh7d6pcpd0O0TV-_vyKIQn26qk4cCTpHEy-GcRIKa1opOd-degk9giPIli7-IJsS0WL6EIchoA74Homi43Qmo-Tarf8lF70O9b7eN8AjsjQZ6PFJl8EcRy0s_ox30TH93GvN3NQh_2lVmD3n8f1xPSrLRcyIFyzWJN0GZzTeYrAX-bAPF8IbW_2laURmBVYT1fg4vHdwH0wMFfJR7WDfY5XRWYyRVia6m6VwTVuWW-fddR4jW9HSXvBX8YjnjrwAwNum_jnbOpJTg/file?dl=1 [following]\n", + "--2024-05-20 09:28:03-- https://uc3ad47fc720b85fdd36566e9669.dl.dropboxusercontent.com/cd/0/inline2/CTQNM0rmZNvzX5Lwg1iXBmqIz4EJ2ZhyZOITdANOekmgSe03MihquuCWfGxT8LH24oZNn9uwX1HUqaRF2BHUzBsQEiTEvONnVsh7d6pcpd0O0TV-_vyKIQn26qk4cCTpHEy-GcRIKa1opOd-degk9giPIli7-IJsS0WL6EIchoA74Homi43Qmo-Tarf8lF70O9b7eN8AjsjQZ6PFJl8EcRy0s_ox30TH93GvN3NQh_2lVmD3n8f1xPSrLRcyIFyzWJN0GZzTeYrAX-bAPF8IbW_2laURmBVYT1fg4vHdwH0wMFfJR7WDfY5XRWYyRVia6m6VwTVuWW-fddR4jW9HSXvBX8YjnjrwAwNum_jnbOpJTg/file?dl=1\n", + "Reusing existing connection to [uc3ad47fc720b85fdd36566e9669.dl.dropboxusercontent.com]:443.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 5595364 (5.3M) [application/binary]\n", + "Saving to: ‘data/kendrick.pdf’\n", + "\n", + "data/kendrick.pdf 100%[===================>] 5.34M 11.4MB/s in 0.5s \n", + "\n", + "2024-05-20 09:28:04 (11.4 MB/s) - ‘data/kendrick.pdf’ saved [5595364/5595364]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir data\n", + "!wget \"https://www.dropbox.com/scl/fi/t1soxfjdp0v44an6sdymd/drake_kendrick_beef.pdf?rlkey=u9546ymb7fj8lk2v64r6p5r5k&st=wjzzrgil&dl=1\" -O data/drake_kendrick_beef.pdf\n", + "!wget \"https://www.dropbox.com/scl/fi/nts3n64s6kymner2jppd6/drake.pdf?rlkey=hksirpqwzlzqoejn55zemk6ld&st=mohyfyh4&dl=1\" -O data/drake.pdf\n", + "!wget \"https://www.dropbox.com/scl/fi/8ax2vnoebhmy44bes2n1d/kendrick.pdf?rlkey=fhxvn94t5amdqcv9vshifd3hj&st=dxdtytn6&dl=1\" -O data/kendrick.pdf" + ] + }, + { + "cell_type": "markdown", + "id": "9edee491-05f8-4fbb-9394-baa82f1e5087", + "metadata": {}, + "source": [ + "### Load Data\n", + "\n", + "We load data using LlamaParse by default, but you can also choose to opt for our free pypdf reader (in SimpleDirectoryReader by default) if you don't have an account! \n", + "\n", + "1. LlamaParse: Signup for an account here: cloud.llamaindex.ai. You get 1k free pages a day, and paid plan is 7k free pages + 0.3c per additional page. LlamaParse is a good option if you want to parse complex documents, like PDFs with charts, tables, and more. \n", + "\n", + "2. Default PDF Parser (In `SimpleDirectoryReader`). If you don't want to signup for an account / use a PDF service, just use the default PyPDF reader bundled in our file loader. It's a good choice for getting started!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "b648635a-2672-407f-bae6-01660e5426d7", + "metadata": {}, + "outputs": [], + "source": [ + "# Uncomment this code if you want to use LlamaParse\n", + "# from llama_parse import LlamaParse\n", + "\n", + "# docs_kendrick = LlamaParse(result_type=\"text\").load_data(\"./data/kendrick.pdf\")\n", + "# docs_drake = LlamaParse(result_type=\"text\").load_data(\"./data/drake.pdf\")\n", + "# docs_both = LlamaParse(result_type=\"text\").load_data(\n", + "# \"./data/drake_kendrick_beef.pdf\"\n", + "# )\n", + "\n", + "# Uncomment this code if you want to use SimpleDirectoryReader / default PDF Parser\n", + "from llama_index.core import SimpleDirectoryReader\n", + "\n", + "docs_kendrick = SimpleDirectoryReader(input_files=[\"data/kendrick.pdf\"]).load_data()\n", + "docs_drake = SimpleDirectoryReader(input_files=[\"data/drake.pdf\"]).load_data()\n", + "docs_both = SimpleDirectoryReader(input_files=[\"data/drake_kendrick_beef.pdf\"]).load_data()" + ] + }, + { + "cell_type": "markdown", + "id": "071a8f44-2765-4d57-b8da-15d3c718874d", + "metadata": {}, + "source": [ + "## 1. Basic Completion and Chat" + ] + }, + { + "cell_type": "markdown", + "id": "c0b1ace8-32fb-46b2-a065-8817ddc0310b", + "metadata": {}, + "source": [ + "### Call complete with a prompt" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "a2db43f9-74af-453c-9f83-8db0379c3302", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "I'm just an AI, I don't have personal preferences or opinions, nor do I have the capacity to enjoy or dislike music. I can provide information and insights about different artists and their work, but I don't have personal feelings or emotions.\n", + "\n", + "However, I can tell you that both Drake and Kendrick Lamar are highly acclaimed and influential artists in the music industry. 
They have both received widespread critical acclaim and have won numerous awards for their work.\n", + "\n", + "Drake is known for his introspective and emotive lyrics, as well as his ability to blend different genres such as hip-hop, R&B, and pop. He has released several successful albums, including \"Take Care\" and \"Views\".\n", + "\n", + "Kendrick Lamar is known for his socially conscious and thought-provoking lyrics, as well as his unique blend of jazz, funk, and hip-hop. He has released several critically acclaimed albums, including \"Good Kid, M.A.A.D City\" and \"To Pimp a Butterfly\".\n", + "\n", + "Ultimately, whether you prefer Drake or Kendrick Lamar depends on your personal taste in music and the type of music you enjoy.\n" + ] + } + ], + "source": [ + "response = llm.complete(\"do you like drake or kendrick better?\")\n", + "\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "89326153-e2d2-4136-8193-fb27d20670c3", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Man, I'm a die-hard Drake fan, and I gotta say, I love the 6 God for a lot of reasons. Now, I know some people might say Kendrick is the king of hip-hop, and I respect that, but for me, Drake brings something unique to the table that sets him apart.\n", + "\n", + "First of all, Drake's lyrics are so relatable. He's not just rapping about gangsta life or street cred; he's talking about real-life struggles, relationships, and emotions. His songs are like a diary entry, you know? He's sharing his thoughts, feelings, and experiences in a way that resonates with people from all walks of life. I mean, who hasn't been through a breakup or felt like they're stuck in a rut? Drake's music speaks to that.\n", + "\n", + "And let's not forget his storytelling ability. The man can paint a picture with his words. 
He's got this effortless flow, and his rhymes are like a puzzle – intricate, clever, and always surprising. He's got this ability to weave together complex narratives that keep you engaged from start to finish.\n", + "\n", + "Now, I know some people might say Kendrick's lyrics are more socially conscious, and that's true. But for me, Drake's music is more personal, more intimate. He's not just preaching to the choir; he's sharing his own struggles, fears, and doubts. That vulnerability is what makes his music so powerful.\n", + "\n", + "And let's not forget his production. Drake's got an ear for beats, man. He's always pushing the boundaries of what hip-hop can sound like. From \"Marvin's Room\" to \"God's Plan,\" he's consistently delivered some of the most innovative, catchy, and emotive production in the game.\n", + "\n", + "Now, I'm not saying Kendrick isn't a genius – he is. But for me, Drake's music is more relatable, more personal, and more innovative. He's the perfect blend of street cred and pop sensibility. And let's be real, his flow is unmatched. The man can spit bars like nobody's business.\n", + "\n", + "So, yeah, I'm a Drake fan through and through. I love his music, his message, and his artistry. He's the real MVP, and I'm not ashamed to say it." + ] + } + ], + "source": [ + "stream_response = llm.stream_complete(\n", + " \"you're a drake fan. 
tell me why you like drake more than kendrick\"\n", + ")\n", + "\n", + "for t in stream_response:\n", + " print(t.delta, end=\"\")" + ] + }, + { + "cell_type": "markdown", + "id": "a4558339-c8a1-4d26-a430-eb71768b5351", + "metadata": {}, + "source": [ + "### Call chat with a list of messages" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "5f393031-f743-4a28-a122-71817e3fbd1b", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.llms import ChatMessage\n", + "\n", + "messages = [\n", + " ChatMessage(role=\"system\", content=\"You are Kendrick.\"),\n", + " ChatMessage(role=\"user\", content=\"Write a verse.\"),\n", + "]\n", + "response = llm.chat(messages)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "8e9551fc-0efc-4671-bc57-339121004c39", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "assistant: \"I'm the king of the game, no debate\n", + "My rhymes are fire, can't nobody relate\n", + "I'm on a mission, to spread the message wide\n", + "My flow's on a hundred, ain't nobody gonna divide\"\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "markdown", + "id": "6a67a33d-fe7d-4381-983f-ca3a6945995d", + "metadata": {}, + "source": [ + "## 2. 
Basic RAG (Vector Search, Summarization)" + ] + }, + { + "cell_type": "markdown", + "id": "c104a0c5-e43b-475b-9fa6-186906c1f327", + "metadata": {}, + "source": [ + "### Basic RAG (Vector Search)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "216787b7-e40a-43fc-a4ca-c43cb798ce9e", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import VectorStoreIndex\n", + "\n", + "index = VectorStoreIndex.from_documents(docs_both)\n", + "query_engine = index.as_query_engine(similarity_top_k=3)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "a854e9d3-70f1-4927-a2f6-59e90c31f2f0", + "metadata": {}, + "outputs": [], + "source": [ + "response = query_engine.query(\"Tell me about family matters\")" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "da796970-bc38-4cb4-9d32-ebd1b71d4bdc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Drake's diss track \"Family Matters\" is essentially three songs in one, on three different beats. 
The track is a seven-and-a-half-minute diss track with an accompanying video.\n" + ] + } + ], + "source": [ + "print(str(response))" + ] + }, + { + "cell_type": "markdown", + "id": "eff935b7-4f37-4758-8997-82fb0852e732", + "metadata": {}, + "source": [ + "### Basic RAG (Summarization)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "dfe72300-7a38-453e-b1f2-bc1c00a01ff7", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import SummaryIndex\n", + "\n", + "summary_index = SummaryIndex.from_documents(docs_both)\n", + "summary_engine = summary_index.as_query_engine()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "178f1f12-51f7-4b45-9346-c16ed12b3b8d", + "metadata": {}, + "outputs": [], + "source": [ + "response = summary_engine.query(\n", + " \"Given your assessment of this article, who won the beef?\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "b8125382-d576-4b99-a0da-2fbb71a5b19b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "It's difficult to declare a clear winner in this beef, as both parties have delivered strong diss tracks and have been engaging in a back-and-forth exchange.\n" + ] + } + ], + "source": [ + "print(str(response))" + ] + }, + { + "cell_type": "markdown", + "id": "68918eb6-f1e6-460c-b1d5-fb49c3fed4b8", + "metadata": {}, + "source": [ + "## 3. 
Advanced RAG (Routing)" + ] + }, + { + "cell_type": "markdown", + "id": "94fd7097-0287-4522-8e43-3e088291fa8a", + "metadata": {}, + "source": [ + "### Build a Router that can choose whether to do vector search or summarization" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "3949dd41-e9a1-47f6-900f-4f987cad3f84", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.tools import QueryEngineTool, ToolMetadata\n", + "\n", + "vector_tool = QueryEngineTool(\n", + " index.as_query_engine(),\n", + " metadata=ToolMetadata(\n", + " name=\"vector_search\",\n", + " description=\"Useful for searching for specific facts.\",\n", + " ),\n", + ")\n", + "\n", + "summary_tool = QueryEngineTool(\n", + " index.as_query_engine(response_mode=\"tree_summarize\"),\n", + " metadata=ToolMetadata(\n", + " name=\"summary\",\n", + " description=\"Useful for summarizing an entire document.\",\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "d063d07b-c03e-4b26-8556-e3c058d2fd52", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.query_engine import RouterQueryEngine\n", + "\n", + "query_engine = RouterQueryEngine.from_defaults(\n", + " [vector_tool, summary_tool], select_multi=False, llm=llm_70b\n", + ")\n", + "\n", + "response = query_engine.query(\n", + " \"Tell me about the song meet the grahams - why is it significant\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "396aad75-5a71-4bd9-a760-7f13fe223079", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\"Meet the Grahams\" is significant because it marks a turning point in the beef between Kendrick Lamar and Drake. The song is notable for its lighthearted and humorous tone, with Kendrick cracking jokes and making playful jabs at Drake. 
The track also showcases Kendrick's ability to poke fun at himself and not take himself too seriously.\n" + ] + } + ], + "source": [ + "print(response)" + ] + }, + { + "cell_type": "markdown", + "id": "a795f0bc-e871-4580-8983-6fb27d421fc5", + "metadata": {}, + "source": [ + "## 4. Text-to-SQL \n", + "\n", + "Here, we download and use a sample SQLite database with 11 tables, with various info about music, playlists, and customers. We will limit to a select few tables for this test." + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "a5096501-92c3-41af-a871-ade869d710fb", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2024-05-20 09:31:46-- https://www.sqlitetutorial.net/wp-content/uploads/2018/03/chinook.zip\n", + "Resolving www.sqlitetutorial.net (www.sqlitetutorial.net)... 2606:4700:3037::6815:1e8d, 2606:4700:3037::ac43:acfa, 172.67.172.250, ...\n", + "Connecting to www.sqlitetutorial.net (www.sqlitetutorial.net)|2606:4700:3037::6815:1e8d|:443... connected.\n", + "HTTP request sent, awaiting response... " + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "200 OK\n", + "Length: 305596 (298K) [application/zip]\n", + "Saving to: ‘./data/chinook.zip’\n", + "\n", + "./data/chinook.zip 100%[===================>] 298.43K --.-KB/s in 0.07s \n", + "\n", + "2024-05-20 09:31:46 (4.30 MB/s) - ‘./data/chinook.zip’ saved [305596/305596]\n", + "\n", + "Archive: ./data/chinook.zip\n", + "replace chinook.db? 
[y]es, [n]o, [A]ll, [N]one, [r]ename: " + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "^C\r\n" + ] + } + ], + "source": [ + "!wget \"https://www.sqlitetutorial.net/wp-content/uploads/2018/03/chinook.zip\" -O \"./data/chinook.zip\"\n", + "!unzip \"./data/chinook.zip\"" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "d4db989e-c18d-4416-928e-7be4ead4d869", + "metadata": {}, + "outputs": [], + "source": [ + "from sqlalchemy import (\n", + " create_engine,\n", + " MetaData,\n", + " Table,\n", + " Column,\n", + " String,\n", + " Integer,\n", + " select,\n", + " column,\n", + ")\n", + "\n", + "engine = create_engine(\"sqlite:///chinook.db\")" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "bf6ed233-0ea3-4d4f-8c33-5b6d558b89b9", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import SQLDatabase\n", + "\n", + "sql_database = SQLDatabase(engine)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "debae423-1004-40f6-9356-e1c3add4d965", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.indices.struct_store import NLSQLTableQueryEngine\n", + "\n", + "query_engine = NLSQLTableQueryEngine(\n", + " sql_database=sql_database,\n", + " tables=[\"albums\", \"tracks\", \"artists\"],\n", + " llm=llm_70b,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "a65ecd70-09c4-4872-b712-3a8235d03db2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Here are some 
albums: For Those About To Rock We Salute You, Balls to the Wall, Restless and Wild, Let There Be Rock, Big Ones, Jagged Little Pill, Facelift, Warner 25 Anos, Plays Metallica By Four Cellos, and Audioslave.\n" + ] + } + ], + "source": [ + "response = query_engine.query(\"What are some albums?\")\n", + "\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "c12b93ef-d6d1-4d15-9cb2-343070f72851", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Here are 5 artists: AC/DC, Accept, Aerosmith, Alanis Morissette, and Alice In Chains.\n" + ] + } + ], + "source": [ + "response = query_engine.query(\"What are some artists? Limit it to 5.\")\n", + "\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "id": "2c243d38-c6ac-445c-b9d4-53a9ae013b7b", + "metadata": {}, + "source": [ + "This last query should be a more complex join" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "553741c2-1050-445d-979a-ae2150ee3248", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Here are three tracks from the legendary Australian rock band AC/DC: \"For Those About To Rock (We Salute You)\", \"Put The Finger On You\", and \"Let's Get It Up\".\n" + ] + } + ], + "source": [ + "response = query_engine.query(\n", + " \"What are some tracks from the artist AC/DC? 
Limit it to 3\"\n", + ")\n", + "\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "300689d7-9e67-4404-9898-27404ee6d4b5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "SELECT tracks.Name FROM tracks INNER JOIN albums ON tracks.AlbumId = albums.AlbumId INNER JOIN artists ON albums.ArtistId = artists.ArtistId WHERE artists.Name = 'AC/DC' LIMIT 3;\n" + ] + } + ], + "source": [ + "print(response.metadata[\"sql_query\"])" + ] + }, + { + "cell_type": "markdown", + "id": "1419fe67-aa6a-47db-88cd-9bb251c15615", + "metadata": {}, + "source": [ + "## 5. Structured Data Extraction\n", + "\n", + "An important use case for function calling is extracting structured objects. LlamaIndex provides an intuitive interface for this through `structured_predict` - simply define the target Pydantic class (can be nested), and given a prompt, we extract out the desired object.\n", + "\n", + "**NOTE**: Since there's no native function calling support with Llama3, the structured extraction is performed by prompting the LLM + output parsing." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "4432f35a-5f29-45e9-a928-32e6d77b158e", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.llms.groq import Groq\n", + "from llama_index.core.prompts import PromptTemplate\n", + "from pydantic import BaseModel\n", + "\n", + "\n", + "class Restaurant(BaseModel):\n", + " \"\"\"A restaurant with name, city, and cuisine.\"\"\"\n", + "\n", + " name: str\n", + " city: str\n", + " cuisine: str\n", + "\n", + "\n", + "llm = Groq(model=\"llama3-8b-8192\", pydantic_program_mode=\"llm\")\n", + "prompt_tmpl = PromptTemplate(\n", + " \"Generate a restaurant in a given city {city_name}\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "2c451f52-a051-4ba2-a683-0c1fd258d986", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "name='Café Havana' city='Miami' cuisine='Cuban'\n" + ] + } + ], + "source": [ + "restaurant_obj = llm.structured_predict(\n", + " Restaurant, prompt_tmpl, city_name=\"Miami\"\n", + ")\n", + "print(restaurant_obj)" + ] + }, + { + "cell_type": "markdown", + "id": "839018a9-b65f-4824-83f7-2e4e52b55c5d", + "metadata": {}, + "source": [ + "## 6. Adding Chat History to RAG (Chat Engine)\n", + "\n", + "In this section we create a stateful chatbot from a RAG pipeline, with our chat engine abstraction.\n", + "\n", + "Unlike a stateless query engine, the chat engine maintains conversation history (through a memory module like buffer memory). 
It performs retrieval given a condensed question, and feeds the condensed question + context + chat history into the final LLM prompt.\n", + "\n", + "Related resource: https://docs.llamaindex.ai/en/stable/examples/chat_engine/chat_engine_condense_plus_context/" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "27e56315-9513-4b32-bf9a-ce97c3ab52df", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core.memory import ChatMemoryBuffer\n", + "from llama_index.core.chat_engine import CondensePlusContextChatEngine\n", + "\n", + "memory = ChatMemoryBuffer.from_defaults(token_limit=3900)\n", + "\n", + "chat_engine = CondensePlusContextChatEngine.from_defaults(\n", + " index.as_retriever(),\n", + " memory=memory,\n", + " llm=llm,\n", + " context_prompt=(\n", + " \"You are a chatbot, able to have normal interactions, as well as talk\"\n", + " \" about the Kendrick and Drake beef.\"\n", + " \"Here are the relevant documents for the context:\\n\"\n", + " \"{context_str}\"\n", + " \"\\nInstruction: Use the previous chat history, or the context above, to interact and help the user.\"\n", + " ),\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "b24524d2-fdce-4237-8ecc-67f139302303", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Condensed question: Tell me about the songs Drake released in the beef.\n", + "Context: page_label: 31\n", + "file_path: data/drake_kendrick_beef.pdf\n", + "\n", + "Culture\n", + "Shaboo zey’s Cowboy Carter Features Were Only the Be ginning\n", + "By Heven Haile\n", + "Sign up for Manual, our new flagship newsletter\n", + "Useful advice on style, health, and more, four days a week.\n", + "5/10/24, 10:08 PM The Kendrick Lamar/Drake Beef, Explained | GQ\n", + "https://www.gq.com/story/the-kendrick-lamar-drake-beef-explained 31/34\n", + "\n", + "page_label: 18\n", + "file_path: data/drake_kendrick_beef.pdf\n", + 
"\n", + "Kurrco\n", + "@Kurrco·Follow\n", + "KENDRICK LAMAR\n", + "6\u000016 IN LA\n", + "(DRAKE DISS)\n", + "OUT NOW \n", + "This video has been deleted.\n", + "6\u000008 AM · May 3, 2024\n", + "59.3K Reply Copy link\n", + "Read 1.3K replies\n", + "After all this talk about “the clock,†who among us expected Kendrick to follow up his\n", + "own titanic diss track with another missile just three days later? Friday morning he\n", + "released “6:16 in LA,†with its title of course being a nod to Drake's series of time-stamp-\n", + "Sign up for Manual, our new flagship newsletter\n", + "Useful advice on style, health, and more, four days a week.\n", + "5/10/24, 10:08 PM The Kendrick Lamar/Drake Beef, Explained | GQ\n", + "https://www.gq.com/story/the-kendrick-lamar-drake-beef-explained 18/34\n", + "The infamous Drake-Kendrick beef! According to the context, Drake didn't release any songs directly addressing the beef. However, Kendrick Lamar did release a few tracks that were perceived as diss tracks aimed at Drake.\n", + "\n", + "One of the notable tracks is \"King Kunta\" from Kendrick's album \"To Pimp a Butterfly\" (2015). Although not directly aimed at Drake, some interpreted the lyrics as a subtle jab at the Canadian rapper.\n", + "\n", + "Later, in 2024, Kendrick released \"6:16 in LA\", which was seen as a response to Drake's \"The Clock\" (2024). 
However, Drake didn't release any direct responses to Kendrick's diss tracks.\n", + "\n", + "Would you like to know more about the beef or the songs involved?\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\n", + " \"Tell me about the songs Drake released in the beef.\"\n", + ")\n", + "print(str(response))" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "f9a87a16-2864-4c48-95e7-a2103e119242", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Condensed question: What do you want to know about Kendrick Lamar's involvement in the Drake beef?\n", + "Context: page_label: 17\n", + "file_path: data/drake_kendrick_beef.pdf\n", + "\n", + "Melly is, of course, the Florida rapper whose rising career came to a screeching halt\n", + "thanks to a still ongoing murder trial accusing Melly of the premeditated murders of two\n", + "YNW associates—ostensibly, two close friends. (Second best line: using Haley Joel\n", + "Osment's IMDb for a two-for-one A.I. and ghostwriters reference.)\n", + "With lines referencing Puff Daddy notoriously slapping Drake and calling out Drake's\n", + "right-hand enforcer Chubbs by name, Kendrick's threatening to “take it there,†but for\n", + "now it remains a fun war of words and one that doesn't seem likely to end anytime soon,\n", + "much less in an anticlimax like the Drake-Pusha T beef. 
Drake can only have been\n", + "desperate for Kendrick to respond because he has a fully loaded clip waiting to shoot,\n", + "and Kendrick for his part here, promises “headshots all year, you better walk around like\n", + "Daft Punk.†Summer's heating up.\n", + "May 3: K endrick g oes back-to-back with “6:16 in L Aâ€\n", + "Sign up for Manual, our new flagship newsletter\n", + "Useful advice on style, health, and more, four days a week.\n", + "5/10/24, 10:08 PM The Kendrick Lamar/Drake Beef, Explained | GQ\n", + "https://www.gq.com/story/the-kendrick-lamar-drake-beef-explained 17/34\n", + "\n", + "page_label: 1\n", + "file_path: data/drake_kendrick_beef.pdf\n", + "\n", + "Culture\n", + "The K endrick L amar /Drake Bee f, ExplainedChrist opher P olk/Getty Ima ges\n", + "Sign up for Manual, our new flagship newsletter\n", + "Useful advice on style, health, and more, four days a week.Email address\n", + "SIGN ME UP\n", + "NO THANKS\n", + "5/10/24, 10:08 PM The Kendrick Lamar/Drake Beef, Explained | GQ\n", + "https://www.gq.com/story/the-kendrick-lamar-drake-beef-explained 1/34\n", + "Kendrick Lamar! According to the context, Kendrick Lamar did release some tracks that were perceived as diss tracks aimed at Drake. One notable example is \"The Heart Part 4\" (2017), which contains lyrics that some interpreted as a response to Drake.\n", + "\n", + "Additionally, Kendrick released \"Humble\" (2017) which some saw as a diss track aimed at Drake. 
The lyrics in \"Humble\" contain lines that some interpreted as a reference to Drake's lyrics in his song \"Glow\" (2016).\n", + "\n", + "Kendrick also released \"King Kunta\" (2015) which, although not directly aimed at Drake, some interpreted as a subtle jab at the Canadian rapper.\n", + "\n", + "Would you like to know more about the beef or the songs involved?\n" + ] + } + ], + "source": [ + "response = chat_engine.chat(\"What about Kendrick?\")\n", + "print(str(response))" + ] + }, + { + "cell_type": "markdown", + "id": "a7fa07ed-58f0-445e-bbd3-4ad8bac6598e", + "metadata": {}, + "source": [ + "## 7. Agents\n", + "\n", + "Here we build agents with Llama 3. We perform RAG over simple functions as well as the documents above." + ] + }, + { + "cell_type": "markdown", + "id": "aa98d735-5d43-413f-aab3-fc3adeed81b1", + "metadata": {}, + "source": [ + "### Agents And Tools" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "id": "cacc1470", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.llms.groq import Groq\n", + "\n", + "llm = Groq(model=\"llama3-8b-8192\")\n", + "llm_70b = Groq(model=\"llama3-70b-8192\")" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "fb73a01f-8a2e-4dd6-91f8-710c92b81c56", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "from typing import Sequence, List\n", + "\n", + "from llama_index.core.llms import ChatMessage\n", + "from llama_index.core.tools import BaseTool, FunctionTool\n", + "from llama_index.agent.openai import OpenAIAgent\n", + "\n", + "import nest_asyncio\n", + "\n", + "nest_asyncio.apply()" + ] + }, + { + "cell_type": "markdown", + "id": "efbee832-9786-4551-93f2-01ee90fa0f4d", + "metadata": {}, + "source": [ + "### Define Tools" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "id": "b2058b36-8053-4dc8-9218-c286702ecf66", + "metadata": {}, + "outputs": [], + "source": [ + "def multiply(a: int, b: int) -> int:\n", + " \"\"\"Multiple two integers 
and returns the result integer\"\"\"\n", + " return a * b\n", + "\n", + "\n", + "def add(a: int, b: int) -> int:\n", + " \"\"\"Add two integers and returns the result integer\"\"\"\n", + " return a + b\n", + "\n", + "\n", + "def subtract(a: int, b: int) -> int:\n", + " \"\"\"Subtract two integers and returns the result integer\"\"\"\n", + " return a - b\n", + "\n", + "\n", + "def divide(a: int, b: int) -> int:\n", + " \"\"\"Divides two integers and returns the result integer\"\"\"\n", + " return a / b\n", + "\n", + "\n", + "multiply_tool = FunctionTool.from_defaults(fn=multiply)\n", + "add_tool = FunctionTool.from_defaults(fn=add)\n", + "subtract_tool = FunctionTool.from_defaults(fn=subtract)\n", + "divide_tool = FunctionTool.from_defaults(fn=divide)\n", + "llm_70b.is_function_calling_model = True" + ] + }, + { + "cell_type": "markdown", + "id": "22d7d4dc-e2ce-402c-9350-0e7010d0080c", + "metadata": {}, + "source": [ + "### ReAct Agent" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "id": "72a48053-e30d-4884-bcac-80752047d940", + "metadata": {}, + "outputs": [], + "source": [ + "agent = OpenAIAgent.from_tools(\n", + " [multiply_tool, add_tool, subtract_tool, divide_tool],\n", + " llm=llm_70b,\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "7ada828a-3b05-4fc1-90e8-986c5607ae61", + "metadata": {}, + "source": [ + "### Querying" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "id": "9c0b1e56-d9f7-4615-a15a-c91fea1adb00", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Added user message to memory: What is (121 + 2) * 5?\n", + "=== Calling Function ===\n", + "Calling function: add with args: {\"a\":121,\"b\":2}\n", + "Got output: 123\n", + "========================\n", + "\n", + "=== Calling Function ===\n", + "Calling function: multiply with args: {\"a\":123,\"b\":5}\n", + "Got output: 615\n", + "========================\n", + "\n", + "The answer is 
615.\n" + ] + } + ], + "source": [ + "response = agent.chat(\"What is (121 + 2) * 5?\")\n", + "print(str(response))" + ] + }, + { + "cell_type": "markdown", + "id": "67ce45f6-bdd4-42aa-8f74-43a50f14094e", + "metadata": {}, + "source": [ + "### ReAct Agent With RAG QueryEngine Tools" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "id": "97fce5f1-eacf-4ecc-9e83-072e74d3a2a9", + "metadata": {}, + "outputs": [], + "source": [ + "from llama_index.core import (\n", + " SimpleDirectoryReader,\n", + " VectorStoreIndex,\n", + " StorageContext,\n", + " load_index_from_storage,\n", + ")\n", + "\n", + "from llama_index.core.tools import QueryEngineTool, ToolMetadata" + ] + }, + { + "cell_type": "markdown", + "id": "23963d00-e3d2-4ce1-9ac3-aa486bf4b1a5", + "metadata": {}, + "source": [ + "### Create ReAct Agent using RAG QueryEngine Tools" + ] + }, + { + "cell_type": "markdown", + "id": "1844dbbd-477c-4c4d-bb18-2c2e16a75a50", + "metadata": {}, + "source": [ + "This may take 4 minutes to run:" + ] + }, + { + "cell_type": "code", + "execution_count": 69, + "id": "66ab1e60-3374-4eb9-b7dc-c28db3b47c51", + "metadata": {}, + "outputs": [], + "source": [ + "drake_index = VectorStoreIndex.from_documents(docs_drake)\n", + "drake_query_engine = drake_index.as_query_engine(similarity_top_k=3)\n", + "\n", + "kendrick_index = VectorStoreIndex.from_documents(docs_kendrick)\n", + "kendrick_query_engine = kendrick_index.as_query_engine(similarity_top_k=3)" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "id": "0e241fe9-f390-4be5-b3c4-da4f56db01ef", + "metadata": {}, + "outputs": [], + "source": [ + "drake_tool = QueryEngineTool(\n", + " drake_index.as_query_engine(),\n", + " metadata=ToolMetadata(\n", + " name=\"drake_search\",\n", + " description=\"Useful for searching over Drake's life.\",\n", + " ),\n", + ")\n", + "\n", + "kendrick_tool = QueryEngineTool(\n", + " kendrick_index.as_query_engine(),\n", + " metadata=ToolMetadata(\n", + " 
name=\"kendrick_search\",\n", + " description=\"Useful for searching over Kendrick's life.\",\n", + " ),\n", + ")\n", + "\n", + "query_engine_tools = [drake_tool, kendrick_tool]" + ] + }, + { + "cell_type": "code", + "execution_count": 71, + "id": "b922feac-b221-4737-92c6-e63eeab4eab7", + "metadata": {}, + "outputs": [], + "source": [ + "agent = ReActAgent.from_tools(\n", + " query_engine_tools,\n", + " llm=llm_70b,\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "7e38edc8-47f8-4f1a-ad87-bc3a9e31a65e", + "metadata": {}, + "source": [ + "### Querying" + ] + }, + { + "cell_type": "code", + "execution_count": 72, + "id": "035c2c8b-5a5e-4df0-a423-4c2d6054f457", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[1;3;38;5;200mThought: I need to use a tool to help me answer the question.\n", + "Action: kendrick_search\n", + "Action Input: {'input': \"Kendrick Lamar's childhood\"}\n", + "\u001b[0m\u001b[1;3;34mObservation: Kendrick Lamar was born on June 17, 1987, in Compton, California. His parents, Kenneth \"Kenny\" Duckworth and Paul Oliver, relocated to Compton in 1984 due to his father's affiliation with the Gangster Disciples. Lamar was named after singer-songwriter Eddie Kendricks of the Temptations. He was an only child until the age of seven and was described as a loner by his mother. Eventually, his parents had his two younger brothers and younger sister, businesswoman Kayla Sawyer (née Duckworth).\n", + "\u001b[0m\u001b[1;3;38;5;200mThought: I need to use a tool to help me answer the question.\n", + "Action: drake_search\n", + "Action Input: {'input': \"Drake's childhood\"}\n", + "\u001b[0m\u001b[1;3;34mObservation: Drake was raised in two neighborhoods. 
He lived on Weston Road in Toronto's working-class west end until grade six and attended Weston Memorial Junior Public School until grade four.\n", + "\u001b[0m\u001b[1;3;38;5;200mThought: I need to use a tool to help me answer the question.\n", + "Action: drake_search\n", + "Action Input: {'input': \"Drake's family and early life\"}\n", + "\u001b[0m\u001b[1;3;34mObservation: Drake was raised in two neighborhoods. He lived on Weston Road in Toronto's working-class west end until grade six and attended Weston Memorial Junior Public School until grade four. He was a promising right winger in minor hockey with the Weston Red Wings.\n", + "\u001b[0m\u001b[1;3;38;5;200mThought: I need to use a tool to help me answer the question.\n", + "Action: drake_search\n", + "Action Input: {'input': \"Drake's parents and family background\"}\n", + "\u001b[0m\u001b[1;3;34mObservation: Drake's parents are Sandi Graham and Dennis Graham. His mother, Sandi Graham, is a Jewish Canadian and his father, Dennis Graham, is an African American from Memphis, Tennessee.\n", + "\u001b[0m\u001b[1;3;38;5;200mThought: I have enough information to answer the question.\n", + "Answer: Kendrick Lamar grew up in Compton, California, with his parents and siblings, while Drake grew up in Toronto, Canada, with his Jewish-Canadian mother and African-American father, moving between two neighborhoods and playing minor hockey.\n", + "\u001b[0mKendrick Lamar grew up in Compton, California, with his parents and siblings, while Drake grew up in Toronto, Canada, with his Jewish-Canadian mother and African-American father, moving between two neighborhoods and playing minor hockey.\n" + ] + } + ], + "source": [ + "response = agent.chat(\"Tell me about how Kendrick and Drake grew up\")\n", + "print(str(response))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "66549ee8", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 
(ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/recipes/llama_api_providers/llama3_cookbook_groq.ipynb b/recipes/llama_api_providers/llama3_cookbook_groq.ipynb deleted file mode 100644 index e595bb75..00000000 --- a/recipes/llama_api_providers/llama3_cookbook_groq.ipynb +++ /dev/null @@ -1,937 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "09211e76-286f-4b12-acd7-cfb082dc2d66", - "metadata": {}, - "source": [ - "# Llama 3 Cookbook with LlamaIndex and Groq\n", - "\n", - "<a href=\"https://colab.research.google.com/github/meta-llama/llama-recipes/blob/main/recipes/llama_api_providers/llama3_cookbook_groq.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>\n", - "\n", - "Meta developed and released the Meta [Llama 3](https://ai.meta.com/blog/meta-llama-3/) family of large language models (LLMs), a collection of pretrained and instruction tuned generative text models in 8 and 70B sizes. The Llama 3 instruction tuned models are optimized for dialogue use cases and outperform many of the available open source chat models on common industry benchmarks.\n", - "\n", - "In this notebook, we demonstrate how to use Llama 3 with LlamaIndex for a comprehensive set of use cases. \n", - "1. Basic completion / chat \n", - "2. Basic RAG (Vector Search, Summarization)\n", - "3. Advanced RAG (Routing)\n", - "4. Text-to-SQL \n", - "5. Structured Data Extraction\n", - "6. Chat Engine + Memory\n", - "7. Agents\n", - "\n", - "\n", - "We use Llama3-8B and Llama3-70B through [Groq](https://groq.com) - you can sign up there to get a free trial API key." 
- ] - }, - { - "cell_type": "markdown", - "id": "de2901c0-e20d-48e5-9385-dbca2258c564", - "metadata": {}, - "source": [ - "## Installation and Setup" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bcf643ac-b025-4812-aaed-f8f85d1ba505", - "metadata": {}, - "outputs": [], - "source": [ - "!pip install llama-index\n", - "!pip install llama-index-llms-groq\n", - "!pip install llama-index-embeddings-huggingface\n", - "!pip install llama-parse" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "641fa5c8-d63e-47f8-b5bc-ebf994f6e314", - "metadata": {}, - "outputs": [], - "source": [ - "import nest_asyncio\n", - "\n", - "nest_asyncio.apply()" - ] - }, - { - "cell_type": "markdown", - "id": "1714ea83-6cd4-44bb-b53f-4499126c3809", - "metadata": {}, - "source": [ - "### Setup LLM using Groq\n", - "\n", - "To use [Groq](https://groq.com), you need to make sure that `GROQ_API_KEY` is specified as an environment variable." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5d46440c", - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "\n", - "os.environ[\"GROQ_API_KEY\"] = \"YOUR_GROQ_API_KEY\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d5256970-eba4-499a-b438-8766a290a61a", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.llms.groq import Groq\n", - "\n", - "llm = Groq(model=\"llama3-8b-8192\")\n", - "llm_70b = Groq(model=\"llama3-70b-8192\")" - ] - }, - { - "cell_type": "markdown", - "id": "41c3f154-d345-465d-8eed-63b99adbd3ca", - "metadata": {}, - "source": [ - "### Setup Embedding Model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0cda736d-e414-44e3-8c15-6be49f5f0282", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.embeddings.huggingface import HuggingFaceEmbedding\n", - "\n", - "embed_model = HuggingFaceEmbedding(model_name=\"BAAI/bge-small-en-v1.5\")" - ] - }, - { - "cell_type": "markdown", - 
"id": "3625cf29-7c56-475a-8efd-fbe8ffce194d", - "metadata": {}, - "source": [ - "### Define Global Settings Configuration\n", - "\n", - "In LlamaIndex, you can define global settings so you don't have to pass the LLM / embedding model objects everywhere." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "be3565d1-cc5b-4149-ad5a-7be8f7818e0c", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core import Settings\n", - "\n", - "Settings.llm = llm\n", - "Settings.embed_model = embed_model" - ] - }, - { - "cell_type": "markdown", - "id": "42449b68-47f5-40cf-9207-191307b25e8e", - "metadata": {}, - "source": [ - "### Download Data\n", - "\n", - "Here you'll download data that's used in section 2 and onwards.\n", - "\n", - "We'll download some articles on Kendrick, Drake, and their beef (as of May 2024)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "59b18640-cdfa-42c1-ab53-115983c1fdc4", - "metadata": {}, - "outputs": [], - "source": [ - "!mkdir data\n", - "!wget \"https://www.dropbox.com/scl/fi/t1soxfjdp0v44an6sdymd/drake_kendrick_beef.pdf?rlkey=u9546ymb7fj8lk2v64r6p5r5k&st=wjzzrgil&dl=1\" -O data/drake_kendrick_beef.pdf\n", - "!wget \"https://www.dropbox.com/scl/fi/nts3n64s6kymner2jppd6/drake.pdf?rlkey=hksirpqwzlzqoejn55zemk6ld&st=mohyfyh4&dl=1\" -O data/drake.pdf\n", - "!wget \"https://www.dropbox.com/scl/fi/8ax2vnoebhmy44bes2n1d/kendrick.pdf?rlkey=fhxvn94t5amdqcv9vshifd3hj&st=dxdtytn6&dl=1\" -O data/kendrick.pdf" - ] - }, - { - "cell_type": "markdown", - "id": "9edee491-05f8-4fbb-9394-baa82f1e5087", - "metadata": {}, - "source": [ - "### Load Data\n", - "\n", - "We load data using LlamaParse by default, but you can also choose to opt for our free pypdf reader (in SimpleDirectoryReader by default) if you don't have an account! \n", - "\n", - "1. LlamaParse: Signup for an account here: cloud.llamaindex.ai. You get 1k free pages a day, and paid plan is 7k free pages + 0.3c per additional page. 
LlamaParse is a good option if you want to parse complex documents, like PDFs with charts, tables, and more. \n", - "\n", - "2. Default PDF Parser (In `SimpleDirectoryReader`). If you don't want to signup for an account / use a PDF service, just use the default PyPDF reader bundled in our file loader. It's a good choice for getting started!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b648635a-2672-407f-bae6-01660e5426d7", - "metadata": {}, - "outputs": [], - "source": [ - "# Uncomment this code if you want to use LlamaParse\n", - "# from llama_parse import LlamaParse\n", - "\n", - "# docs_kendrick = LlamaParse(result_type=\"text\").load_data(\"./data/kendrick.pdf\")\n", - "# docs_drake = LlamaParse(result_type=\"text\").load_data(\"./data/drake.pdf\")\n", - "# docs_both = LlamaParse(result_type=\"text\").load_data(\n", - "# \"./data/drake_kendrick_beef.pdf\"\n", - "# )\n", - "\n", - "# Uncomment this code if you want to use SimpleDirectoryReader / default PDF Parser\n", - "# from llama_index.core import SimpleDirectoryReader\n", - "\n", - "# docs_kendrick = SimpleDirectoryReader(input_files=[\"data/kendrick.pdf\"]).load_data()\n", - "# docs_drake = SimpleDirectoryReader(input_files=[\"data/drake.pdf\"]).load_data()\n", - "# docs_both = SimpleDirectoryReader(input_files=[\"data/drake_kendrick_beef.pdf\"]).load_data()" - ] - }, - { - "cell_type": "markdown", - "id": "071a8f44-2765-4d57-b8da-15d3c718874d", - "metadata": {}, - "source": [ - "## 1. 
Basic Completion and Chat" - ] - }, - { - "cell_type": "markdown", - "id": "c0b1ace8-32fb-46b2-a065-8817ddc0310b", - "metadata": {}, - "source": [ - "### Call complete with a prompt" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a2db43f9-74af-453c-9f83-8db0379c3302", - "metadata": {}, - "outputs": [], - "source": [ - "response = llm.complete(\"do you like drake or kendrick better?\")\n", - "\n", - "print(response)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "89326153-e2d2-4136-8193-fb27d20670c3", - "metadata": {}, - "outputs": [], - "source": [ - "stream_response = llm.stream_complete(\n", - " \"you're a drake fan. tell me why you like drake more than kendrick\"\n", - ")\n", - "\n", - "for t in stream_response:\n", - " print(t.delta, end=\"\")" - ] - }, - { - "cell_type": "markdown", - "id": "a4558339-c8a1-4d26-a430-eb71768b5351", - "metadata": {}, - "source": [ - "### Call chat with a list of messages" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5f393031-f743-4a28-a122-71817e3fbd1b", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core.llms import ChatMessage\n", - "\n", - "messages = [\n", - " ChatMessage(role=\"system\", content=\"You are Kendrick.\"),\n", - " ChatMessage(role=\"user\", content=\"Write a verse.\"),\n", - "]\n", - "response = llm.chat(messages)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8e9551fc-0efc-4671-bc57-339121004c39", - "metadata": {}, - "outputs": [], - "source": [ - "print(response)" - ] - }, - { - "cell_type": "markdown", - "id": "6a67a33d-fe7d-4381-983f-ca3a6945995d", - "metadata": {}, - "source": [ - "## 2. 
Basic RAG (Vector Search, Summarization)" - ] - }, - { - "cell_type": "markdown", - "id": "c104a0c5-e43b-475b-9fa6-186906c1f327", - "metadata": {}, - "source": [ - "### Basic RAG (Vector Search)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "216787b7-e40a-43fc-a4ca-c43cb798ce9e", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core import VectorStoreIndex\n", - "\n", - "index = VectorStoreIndex.from_documents(docs_both)\n", - "query_engine = index.as_query_engine(similarity_top_k=3)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a854e9d3-70f1-4927-a2f6-59e90c31f2f0", - "metadata": {}, - "outputs": [], - "source": [ - "response = query_engine.query(\"Tell me about family matters\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "da796970-bc38-4cb4-9d32-ebd1b71d4bdc", - "metadata": {}, - "outputs": [], - "source": [ - "print(str(response))" - ] - }, - { - "cell_type": "markdown", - "id": "eff935b7-4f37-4758-8997-82fb0852e732", - "metadata": {}, - "source": [ - "### Basic RAG (Summarization)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "dfe72300-7a38-453e-b1f2-bc1c00a01ff7", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core import SummaryIndex\n", - "\n", - "summary_index = SummaryIndex.from_documents(docs_both)\n", - "summary_engine = summary_index.as_query_engine()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "178f1f12-51f7-4b45-9346-c16ed12b3b8d", - "metadata": {}, - "outputs": [], - "source": [ - "response = summary_engine.query(\n", - " \"Given your assessment of this article, who won the beef?\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b8125382-d576-4b99-a0da-2fbb71a5b19b", - "metadata": {}, - "outputs": [], - "source": [ - "print(str(response))" - ] - }, - { - "cell_type": "markdown", - "id": "68918eb6-f1e6-460c-b1d5-fb49c3fed4b8", - "metadata": 
{}, - "source": [ - "## 3. Advanced RAG (Routing)" - ] - }, - { - "cell_type": "markdown", - "id": "94fd7097-0287-4522-8e43-3e088291fa8a", - "metadata": {}, - "source": [ - "### Build a Router that can choose whether to do vector search or summarization" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3949dd41-e9a1-47f6-900f-4f987cad3f84", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core.tools import QueryEngineTool, ToolMetadata\n", - "\n", - "vector_tool = QueryEngineTool(\n", - " index.as_query_engine(),\n", - " metadata=ToolMetadata(\n", - " name=\"vector_search\",\n", - " description=\"Useful for searching for specific facts.\",\n", - " ),\n", - ")\n", - "\n", - "summary_tool = QueryEngineTool(\n", - " index.as_query_engine(response_mode=\"tree_summarize\"),\n", - " metadata=ToolMetadata(\n", - " name=\"summary\",\n", - " description=\"Useful for summarizing an entire document.\",\n", - " ),\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d063d07b-c03e-4b26-8556-e3c058d2fd52", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core.query_engine import RouterQueryEngine\n", - "\n", - "query_engine = RouterQueryEngine.from_defaults(\n", - " [vector_tool, summary_tool], select_multi=False, verbose=True, llm=llm_70b\n", - ")\n", - "\n", - "response = query_engine.query(\n", - " \"Tell me about the song meet the grahams - why is it significant\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "396aad75-5a71-4bd9-a760-7f13fe223079", - "metadata": {}, - "outputs": [], - "source": [ - "print(response)" - ] - }, - { - "cell_type": "markdown", - "id": "a795f0bc-e871-4580-8983-6fb27d421fc5", - "metadata": {}, - "source": [ - "## 4. Text-to-SQL \n", - "\n", - "Here, we download and use a sample SQLite database with 11 tables, with various info about music, playlists, and customers. We will limit to a select few tables for this test." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a5096501-92c3-41af-a871-ade869d710fb", - "metadata": {}, - "outputs": [], - "source": [ - "!wget \"https://www.sqlitetutorial.net/wp-content/uploads/2018/03/chinook.zip\" -O \"./data/chinook.zip\"\n", - "!unzip \"./data/chinook.zip\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d4db989e-c18d-4416-928e-7be4ead4d869", - "metadata": {}, - "outputs": [], - "source": [ - "from sqlalchemy import (\n", - " create_engine,\n", - " MetaData,\n", - " Table,\n", - " Column,\n", - " String,\n", - " Integer,\n", - " select,\n", - " column,\n", - ")\n", - "\n", - "engine = create_engine(\"sqlite:///chinook.db\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bf6ed233-0ea3-4d4f-8c33-5b6d558b89b9", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core import SQLDatabase\n", - "\n", - "sql_database = SQLDatabase(engine)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "debae423-1004-40f6-9356-e1c3add4d965", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core.indices.struct_store import NLSQLTableQueryEngine\n", - "\n", - "query_engine = NLSQLTableQueryEngine(\n", - " sql_database=sql_database,\n", - " tables=[\"albums\", \"tracks\", \"artists\"],\n", - " llm=llm_70b,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a65ecd70-09c4-4872-b712-3a8235d03db2", - "metadata": {}, - "outputs": [], - "source": [ - "response = query_engine.query(\"What are some albums?\")\n", - "\n", - "print(response)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c12b93ef-d6d1-4d15-9cb2-343070f72851", - "metadata": {}, - "outputs": [], - "source": [ - "response = query_engine.query(\"What are some artists? 
Limit it to 5.\")\n", - "\n", - "print(response)" - ] - }, - { - "cell_type": "markdown", - "id": "2c243d38-c6ac-445c-b9d4-53a9ae013b7b", - "metadata": {}, - "source": [ - "This last query should be a more complex join" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "553741c2-1050-445d-979a-ae2150ee3248", - "metadata": {}, - "outputs": [], - "source": [ - "response = query_engine.query(\n", - " \"What are some tracks from the artist AC/DC? Limit it to 3\"\n", - ")\n", - "\n", - "print(response)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "300689d7-9e67-4404-9898-27404ee6d4b5", - "metadata": {}, - "outputs": [], - "source": [ - "print(response.metadata[\"sql_query\"])" - ] - }, - { - "cell_type": "markdown", - "id": "1419fe67-aa6a-47db-88cd-9bb251c15615", - "metadata": {}, - "source": [ - "## 5. Structured Data Extraction\n", - "\n", - "An important use case for function calling is extracting structured objects. LlamaIndex provides an intuitive interface for this through `structured_predict` - simply define the target Pydantic class (can be nested), and given a prompt, we extract out the desired object.\n", - "\n", - "**NOTE**: Since there's no native function calling support with Llama3, the structured extraction is performed by prompting the LLM + output parsing." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4432f35a-5f29-45e9-a928-32e6d77b158e", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.llms.groq import Groq\n", - "from llama_index.core.prompts import PromptTemplate\n", - "from pydantic import BaseModel\n", - "\n", - "\n", - "class Restaurant(BaseModel):\n", - " \"\"\"A restaurant with name, city, and cuisine.\"\"\"\n", - "\n", - " name: str\n", - " city: str\n", - " cuisine: str\n", - "\n", - "\n", - "llm = Groq(model=\"llama3-8b-8192\", pydantic_program_mode=\"llm\")\n", - "prompt_tmpl = PromptTemplate(\n", - " \"Generate a restaurant in a given city {city_name}\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2c451f52-a051-4ba2-a683-0c1fd258d986", - "metadata": {}, - "outputs": [], - "source": [ - "restaurant_obj = llm.structured_predict(\n", - " Restaurant, prompt_tmpl, city_name=\"Miami\"\n", - ")\n", - "print(restaurant_obj)" - ] - }, - { - "cell_type": "markdown", - "id": "839018a9-b65f-4824-83f7-2e4e52b55c5d", - "metadata": {}, - "source": [ - "## 6. Adding Chat History to RAG (Chat Engine)\n", - "\n", - "In this section we create a stateful chatbot from a RAG pipeline, with our chat engine abstraction.\n", - "\n", - "Unlike a stateless query engine, the chat engine maintains conversation history (through a memory module like buffer memory). 
It performs retrieval given a condensed question, and feeds the condensed question + context + chat history into the final LLM prompt.\n", - "\n", - "Related resource: https://docs.llamaindex.ai/en/stable/examples/chat_engine/chat_engine_condense_plus_context/" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27e56315-9513-4b32-bf9a-ce97c3ab52df", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core.memory import ChatMemoryBuffer\n", - "from llama_index.core.chat_engine import CondensePlusContextChatEngine\n", - "\n", - "memory = ChatMemoryBuffer.from_defaults(token_limit=3900)\n", - "\n", - "chat_engine = CondensePlusContextChatEngine.from_defaults(\n", - " index.as_retriever(),\n", - " memory=memory,\n", - " llm=llm,\n", - " context_prompt=(\n", - " \"You are a chatbot, able to have normal interactions, as well as talk\"\n", - " \" about the Kendrick and Drake beef.\"\n", - " \"Here are the relevant documents for the context:\\n\"\n", - " \"{context_str}\"\n", - " \"\\nInstruction: Use the previous chat history, or the context above, to interact and help the user.\"\n", - " ),\n", - " verbose=True,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b24524d2-fdce-4237-8ecc-67f139302303", - "metadata": {}, - "outputs": [], - "source": [ - "response = chat_engine.chat(\n", - " \"Tell me about the songs Drake released in the beef.\"\n", - ")\n", - "print(str(response))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f9a87a16-2864-4c48-95e7-a2103e119242", - "metadata": {}, - "outputs": [], - "source": [ - "response = chat_engine.chat(\"What about Kendrick?\")\n", - "print(str(response))" - ] - }, - { - "cell_type": "markdown", - "id": "a7fa07ed-58f0-445e-bbd3-4ad8bac6598e", - "metadata": {}, - "source": [ - "## 7. Agents\n", - "\n", - "Here we build agents with Llama 3. We perform RAG over simple functions as well as the documents above." 
- ] - }, - { - "cell_type": "markdown", - "id": "aa98d735-5d43-413f-aab3-fc3adeed81b1", - "metadata": {}, - "source": [ - "### Agents And Tools" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "fb73a01f-8a2e-4dd6-91f8-710c92b81c56", - "metadata": {}, - "outputs": [], - "source": [ - "import json\n", - "from typing import Sequence, List\n", - "\n", - "from llama_index.core.llms import ChatMessage\n", - "from llama_index.core.tools import BaseTool, FunctionTool\n", - "from llama_index.core.agent import ReActAgent\n", - "\n", - "import nest_asyncio\n", - "\n", - "nest_asyncio.apply()" - ] - }, - { - "cell_type": "markdown", - "id": "efbee832-9786-4551-93f2-01ee90fa0f4d", - "metadata": {}, - "source": [ - "### Define Tools" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b2058b36-8053-4dc8-9218-c286702ecf66", - "metadata": {}, - "outputs": [], - "source": [ - "def multiply(a: int, b: int) -> int:\n", - " \"\"\"Multiple two integers and returns the result integer\"\"\"\n", - " return a * b\n", - "\n", - "\n", - "def add(a: int, b: int) -> int:\n", - " \"\"\"Add two integers and returns the result integer\"\"\"\n", - " return a + b\n", - "\n", - "\n", - "def subtract(a: int, b: int) -> int:\n", - " \"\"\"Subtract two integers and returns the result integer\"\"\"\n", - " return a - b\n", - "\n", - "\n", - "def divide(a: int, b: int) -> int:\n", - " \"\"\"Divides two integers and returns the result integer\"\"\"\n", - " return a / b\n", - "\n", - "\n", - "multiply_tool = FunctionTool.from_defaults(fn=multiply)\n", - "add_tool = FunctionTool.from_defaults(fn=add)\n", - "subtract_tool = FunctionTool.from_defaults(fn=subtract)\n", - "divide_tool = FunctionTool.from_defaults(fn=divide)" - ] - }, - { - "cell_type": "markdown", - "id": "22d7d4dc-e2ce-402c-9350-0e7010d0080c", - "metadata": {}, - "source": [ - "### ReAct Agent" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"72a48053-e30d-4884-bcac-80752047d940", - "metadata": {}, - "outputs": [], - "source": [ - "agent = ReActAgent.from_tools(\n", - " [multiply_tool, add_tool, subtract_tool, divide_tool],\n", - " llm=llm_70b,\n", - " verbose=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "7ada828a-3b05-4fc1-90e8-986c5607ae61", - "metadata": {}, - "source": [ - "### Querying" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9c0b1e56-d9f7-4615-a15a-c91fea1adb00", - "metadata": {}, - "outputs": [], - "source": [ - "response = agent.chat(\"What is (121 + 2) * 5?\")\n", - "print(str(response))" - ] - }, - { - "cell_type": "markdown", - "id": "67ce45f6-bdd4-42aa-8f74-43a50f14094e", - "metadata": {}, - "source": [ - "### ReAct Agent With RAG QueryEngine Tools" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "97fce5f1-eacf-4ecc-9e83-072e74d3a2a9", - "metadata": {}, - "outputs": [], - "source": [ - "from llama_index.core import (\n", - " SimpleDirectoryReader,\n", - " VectorStoreIndex,\n", - " StorageContext,\n", - " load_index_from_storage,\n", - ")\n", - "\n", - "from llama_index.core.tools import QueryEngineTool, ToolMetadata" - ] - }, - { - "cell_type": "markdown", - "id": "23963d00-e3d2-4ce1-9ac3-aa486bf4b1a5", - "metadata": {}, - "source": [ - "### Create ReAct Agent using RAG QueryEngine Tools" - ] - }, - { - "cell_type": "markdown", - "id": "1844dbbd-477c-4c4d-bb18-2c2e16a75a50", - "metadata": {}, - "source": [ - "This may take 4 minutes to run:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "66ab1e60-3374-4eb9-b7dc-c28db3b47c51", - "metadata": {}, - "outputs": [], - "source": [ - "drake_index = VectorStoreIndex.from_documents(docs_drake)\n", - "drake_query_engine = drake_index.as_query_engine(similarity_top_k=3)\n", - "\n", - "kendrick_index = VectorStoreIndex.from_documents(docs_kendrick)\n", - "kendrick_query_engine = kendrick_index.as_query_engine(similarity_top_k=3)" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "id": "0e241fe9-f390-4be5-b3c4-da4f56db01ef", - "metadata": {}, - "outputs": [], - "source": [ - "drake_tool = QueryEngineTool(\n", - " drake_index.as_query_engine(),\n", - " metadata=ToolMetadata(\n", - " name=\"drake_search\",\n", - " description=\"Useful for searching over Drake's life.\",\n", - " ),\n", - ")\n", - "\n", - "kendrick_tool = QueryEngineTool(\n", - " kendrick_index.as_query_engine(),\n", - " metadata=ToolMetadata(\n", - " name=\"kendrick_search\",\n", - " description=\"Useful for searching over Kendrick's life.\",\n", - " ),\n", - ")\n", - "\n", - "query_engine_tools = [drake_tool, kendrick_tool]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b922feac-b221-4737-92c6-e63eeab4eab7", - "metadata": {}, - "outputs": [], - "source": [ - "agent = ReActAgent.from_tools(\n", - " query_engine_tools,\n", - " llm=llm_70b,\n", - " verbose=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "7e38edc8-47f8-4f1a-ad87-bc3a9e31a65e", - "metadata": {}, - "source": [ - "### Querying" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "035c2c8b-5a5e-4df0-a423-4c2d6054f457", - "metadata": {}, - "outputs": [], - "source": [ - "response = agent.chat(\"Tell me about how Kendrick and Drake grew up\")\n", - "print(str(response))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} -- GitLab