diff --git a/recipes/use_cases/LiveData.ipynb b/recipes/use_cases/LiveData.ipynb
index 445215fcb66746750e1b194f88f95bbc98e48c39..5b71e61baaf7b286c018e903ed4e11b9a696589d 100644
--- a/recipes/use_cases/LiveData.ipynb
+++ b/recipes/use_cases/LiveData.ipynb
@@ -23,7 +23,8 @@
     "!pip install llama-index-core\n",
     "!pip install llama-index-llms-replicate\n",
     "!pip install llama-index-embeddings-huggingface\n",
-    "!pip install tavily-python"
+    "!pip install tavily-python\n",
+    "!pip install replicate"
    ]
   },
   {
diff --git a/recipes/use_cases/VideoSummary.ipynb b/recipes/use_cases/VideoSummary.ipynb
index f18eaf592778921e97840d82a69f0d0d1f45c70f..c2de08d0591b1e5e0c12f786c4643a44eeaf1e2d 100644
--- a/recipes/use_cases/VideoSummary.ipynb
+++ b/recipes/use_cases/VideoSummary.ipynb
@@ -32,7 +32,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!pip install langchain youtube-transcript-api tiktoken pytube"
+    "!pip install langchain youtube-transcript-api tiktoken pytube replicate"
    ]
   },
   {
@@ -260,7 +260,7 @@
     "2. Loop over each subsequent sub-document, pass the previous summary with the current sub-document to generate a refined new summary;\n",
     "3. Return the final summary generated on the final sub-document as the final answer - the summary of the whole content.\n",
     "\n",
-    "An example prompt template for each call in step 2 is:\n",
+    "An example prompt template for each call in step 2, which gets used under the hood by LangChain, is:\n",
     "```\n",
     "Your job is to produce a final summary.\n",
     "We have provided an existing summary up to a certain point:\n",
@@ -279,6 +279,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "from langchain.chains.summarize import load_summarize_chain\n",
+    "\n",
     "chain = load_summarize_chain(llm, chain_type=\"refine\")\n",
     "chain.run(split_docs)"
    ]
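
For readers who want to see roughly what `chain_type="refine"` does behind the scenes, here is a minimal sketch of the loop described in the markdown cell above. It assumes `llm` and `split_docs` are the objects created earlier in the notebook, that the LLM exposes LangChain's `predict(str) -> str` call, and that the prompt strings below are approximations of LangChain's defaults rather than the exact templates.

```python
# Rough sketch of the "refine" strategy applied by load_summarize_chain(chain_type="refine").
# The prompt wording is approximate; LangChain's built-in templates may differ slightly.

INITIAL_PROMPT = "Write a concise summary of the following:\n\n{text}\n\nCONCISE SUMMARY:"

REFINE_PROMPT = (
    "Your job is to produce a final summary.\n"
    "We have provided an existing summary up to a certain point:\n"
    "{existing_summary}\n"
    "We have the opportunity to refine the existing summary "
    "(only if needed) with some more context below.\n"
    "{text}\n"
    "Given the new context, refine the original summary."
)

def refine_summarize(llm, docs):
    # Step 1: summarize the first sub-document on its own.
    summary = llm.predict(INITIAL_PROMPT.format(text=docs[0].page_content))
    # Step 2: fold each subsequent sub-document into the running summary.
    for doc in docs[1:]:
        summary = llm.predict(
            REFINE_PROMPT.format(existing_summary=summary, text=doc.page_content)
        )
    # Step 3: the summary produced after the last sub-document covers the whole content.
    return summary

# Example usage with the notebook's objects:
# print(refine_summarize(llm, split_docs))
```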