diff --git a/create-app.ts b/create-app.ts
index 231ace9630141405c09a351a22ccf899544e6694..ad3343d30f3ce5ed5e8644bd2c4b35932453320e 100644
--- a/create-app.ts
+++ b/create-app.ts
@@ -32,6 +32,7 @@ export async function createApp({
   openAiKey,
   model,
   communityProjectPath,
+  llamapack,
   vectorDb,
   externalPort,
   postInstallAction,
@@ -75,6 +76,7 @@ export async function createApp({
     openAiKey,
     model,
     communityProjectPath,
+    llamapack,
     vectorDb,
     externalPort,
     postInstallAction,
diff --git a/helpers/constant.ts b/helpers/constant.ts
index 341fba2c0c45f599c5329678b60bb0acc285dff9..702d1d7fdf326d22ab4a0314309939d3037c39e2 100644
--- a/helpers/constant.ts
+++ b/helpers/constant.ts
@@ -1,2 +1,6 @@
 export const COMMUNITY_OWNER = "run-llama";
 export const COMMUNITY_REPO = "create_llama_projects";
+export const LLAMA_PACK_OWNER = "run-llama";
+export const LLAMA_PACK_REPO = "llama-hub";
+export const LLAMA_HUB_FOLDER_PATH = `${LLAMA_PACK_OWNER}/${LLAMA_PACK_REPO}/main/llama_hub`;
+export const LLAMA_PACK_CONFIG_PATH = `${LLAMA_HUB_FOLDER_PATH}/llama_packs/library.json`;
diff --git a/helpers/index.ts b/helpers/index.ts
index 39630aa5f2fa45410be8f5128fa6947bba0adc39..40c1b4eb92fe9356b51474618eb6383f678527ae 100644
--- a/helpers/index.ts
+++ b/helpers/index.ts
@@ -7,6 +7,7 @@ import { cyan } from "picocolors";
 
 import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./constant";
 import { PackageManager } from "./get-pkg-manager";
+import { installLlamapackProject } from "./llama-pack";
 import { isHavingPoetryLockFile, tryPoetryRun } from "./poetry";
 import { installPythonTemplate } from "./python";
 import { downloadAndExtractRepo } from "./repo";
@@ -153,6 +154,11 @@ export const installTemplate = async (
     return;
   }
 
+  if (props.template === "llamapack" && props.llamapack) {
+    await installLlamapackProject(props);
+    return;
+  }
+
   if (props.framework === "fastapi") {
     await installPythonTemplate(props);
   } else {
diff --git a/helpers/llama-pack.ts b/helpers/llama-pack.ts
new file mode 100644
index 0000000000000000000000000000000000000000..01c26212662105f5cf89a0d0b0e98fd3da6b2169
--- /dev/null
+++ b/helpers/llama-pack.ts
@@ -0,0 +1,91 @@
+import fs from "fs/promises";
+import path from "path";
+import { LLAMA_HUB_FOLDER_PATH, LLAMA_PACK_CONFIG_PATH } from "./constant";
+import { copy } from "./copy";
+import { installPythonDependencies } from "./python";
+import { getRepoRawContent } from "./repo";
+import { InstallTemplateArgs } from "./types";
+
+export async function getAvailableLlamapackOptions(): Promise<
+  {
+    name: string;
+    folderPath: string;
+    example: boolean | undefined;
+  }[]
+> {
+  const libraryJsonRaw = await getRepoRawContent(LLAMA_PACK_CONFIG_PATH);
+  const libraryJson = JSON.parse(libraryJsonRaw);
+  const llamapackKeys = Object.keys(libraryJson);
+  return llamapackKeys
+    .map((key) => ({
+      name: key,
+      folderPath: libraryJson[key].id,
+      example: libraryJson[key].example,
+    }))
+    .filter((item) => !!item.example);
+}
+
+const copyLlamapackEmptyProject = async ({
+  root,
+}: Pick<InstallTemplateArgs, "root">) => {
+  const templatePath = path.join(
+    __dirname,
+    "..",
+    "templates/components/sample-projects/llamapack",
+  );
+  await copy("**", root, {
+    parents: true,
+    cwd: templatePath,
+  });
+};
+
+const copyData = async ({
+  root,
+}: Pick<InstallTemplateArgs, "root" | "llamapack">) => {
+  const dataPath = path.join(__dirname, "..", "templates/components/data");
+  await copy("**", path.join(root, "data"), {
+    parents: true,
+    cwd: dataPath,
+  });
+};
+
+const installLlamapackExample = async ({
+  root,
+  llamapack,
+}: Pick<InstallTemplateArgs, "root" | "llamapack">) => {
+  const exampleFileName = "example.py";
+  const readmeFileName = "README.md";
+  const exampleFilePath = `${LLAMA_HUB_FOLDER_PATH}/${llamapack}/${exampleFileName}`;
+  const readmeFilePath = `${LLAMA_HUB_FOLDER_PATH}/${llamapack}/${readmeFileName}`;
+
+  // Download example.py from llamapack and save to root
+  const exampleContent = await getRepoRawContent(exampleFilePath);
+  await fs.writeFile(path.join(root, exampleFileName), exampleContent);
+
+  // Download README.md from llamapack and combine with README-template.md,
+  // save to root and then delete template file
+  const readmeContent = await getRepoRawContent(readmeFilePath);
+  const readmeTemplateContent = await fs.readFile(
+    path.join(root, "README-template.md"),
+    "utf-8",
+  );
+  await fs.writeFile(
+    path.join(root, readmeFileName),
+    `${readmeContent}\n${readmeTemplateContent}`,
+  );
+  await fs.unlink(path.join(root, "README-template.md"));
+};
+
+export const installLlamapackProject = async ({
+  root,
+  llamapack,
+  postInstallAction,
+}: Pick<InstallTemplateArgs, "root" | "llamapack" | "postInstallAction">) => {
+  console.log("\nInstalling Llamapack project:", llamapack!);
+  await copyLlamapackEmptyProject({ root });
+  await copyData({ root });
+  await installLlamapackExample({ root, llamapack });
+  if (postInstallAction !== "none") {
+    installPythonDependencies(root);
+  }
+};
diff --git a/helpers/repo.ts b/helpers/repo.ts
index 2471a91a4ef8e72279cce5e9ab7d6e756d6cd8a6..3942c28cd08ae5a5d42f7db5a97d170434b041ed 100644
--- a/helpers/repo.ts
+++ b/helpers/repo.ts
@@ -61,3 +61,11 @@ export async function getRepoRootFolders(
   const folders = data.filter((item) => item.type === "dir");
   return folders.map((item) => item.name);
 }
+
+export async function getRepoRawContent(repoFilePath: string) {
+  const url = `https://raw.githubusercontent.com/${repoFilePath}`;
+  const response = await got(url, {
+    responseType: "text",
+  });
+  return response.body;
+}
diff --git a/helpers/types.ts b/helpers/types.ts
index 8346babffb8f1d0334b27d9afdb48d7ec0c1fc89..d7e6e92adc6b0fc7d8dd703f80c711a496d1172f 100644
--- a/helpers/types.ts
+++ b/helpers/types.ts
@@ -1,6 +1,6 @@
 import { PackageManager } from "../helpers/get-pkg-manager";
 
-export type TemplateType = "simple" | "streaming" | "community";
+export type TemplateType = "simple" | "streaming" | "community" | "llamapack";
 export type TemplateFramework = "nextjs" | "express" | "fastapi";
 export type TemplateEngine = "simple" | "context";
 export type TemplateUI = "html" | "shadcn";
@@ -23,6 +23,7 @@ export interface InstallTemplateArgs {
   forBackend?: string;
   model: string;
   communityProjectPath?: string;
+  llamapack?: string;
   vectorDb?: TemplateVectorDB;
   externalPort?: number;
   postInstallAction?: TemplatePostInstallAction;
diff --git a/index.ts b/index.ts
index 0da8419c2c757ae3aaf777695a895a7ad283e2ac..5978dd37af7daa6b8412481904add75165776e13 100644
--- a/index.ts
+++ b/index.ts
@@ -237,6 +237,7 @@ async function run(): Promise<void> {
     openAiKey: program.openAiKey,
     model: program.model,
     communityProjectPath: program.communityProjectPath,
+    llamapack: program.llamapack,
     vectorDb: program.vectorDb,
     externalPort: program.externalPort,
     postInstallAction: program.postInstallAction,
diff --git a/questions.ts b/questions.ts
index 989c9071d59b09a975b0478ee9c35ecab9b4e111..531a8642ed80151347f4994b8cd4b436cf71f335 100644
--- a/questions.ts
+++ b/questions.ts
@@ -7,6 +7,7 @@ import prompts from "prompts";
 import { InstallAppArgs } from "./create-app";
 import { TemplateFramework } from "./helpers";
 import { COMMUNITY_OWNER, COMMUNITY_REPO } from "./helpers/constant";
+import { getAvailableLlamapackOptions } from "./helpers/llama-pack";
 import { getRepoRootFolders } from "./helpers/repo";
 
 export type QuestionArgs = Omit<InstallAppArgs, "appPath" | "packageManager">;
@@ -37,6 +38,7 @@ const defaults: QuestionArgs = {
   openAiKey: "",
   model: "gpt-3.5-turbo",
   communityProjectPath: "",
+  llamapack: "",
   postInstallAction: "dependencies",
 };
 
@@ -129,6 +131,48 @@ export const askQuestions = async (
     field: K,
   ): QuestionArgs[K] => preferences[field] ?? defaults[field];
 
+  // Ask for next action after installation
+  async function askPostInstallAction() {
+    if (program.postInstallAction === undefined) {
+      if (ciInfo.isCI) {
+        program.postInstallAction = getPrefOrDefault("postInstallAction");
+      } else {
+        let actionChoices = [
+          {
+            title: "Just generate code (~1 sec)",
+            value: "none",
+          },
+          {
+            title: "Generate code and install dependencies (~2 min)",
+            value: "dependencies",
+          },
+        ];
+
+        const hasOpenAiKey = program.openAiKey || process.env["OPENAI_API_KEY"];
+        if (program.vectorDb === "none" && hasOpenAiKey) {
+          actionChoices.push({
+            title:
+              "Generate code, install dependencies, and run the app (~2 min)",
+            value: "runApp",
+          });
+        }
+
+        const { action } = await prompts(
+          {
+            type: "select",
+            name: "action",
+            message: "How would you like to proceed?",
+            choices: actionChoices,
+            initial: 1,
+          },
+          handlers,
+        );
+
+        program.postInstallAction = action;
+      }
+    }
+  }
+
   if (!program.template) {
     if (ciInfo.isCI) {
       program.template = getPrefOrDefault("template");
@@ -148,6 +192,10 @@ export const askQuestions = async (
               title: `Community template from ${styledRepo}`,
               value: "community",
             },
+            {
+              title: "Example using a LlamaPack",
+              value: "llamapack",
+            },
           ],
           initial: 1,
         },
@@ -181,6 +229,27 @@ export const askQuestions = async (
     return; // early return - no further questions needed for community projects
   }
 
+  if (program.template === "llamapack") {
+    const availableLlamaPacks = await getAvailableLlamapackOptions();
+    const { llamapack } = await prompts(
+      {
+        type: "select",
+        name: "llamapack",
+        message: "Select LlamaPack",
+        choices: availableLlamaPacks.map((pack) => ({
+          title: pack.name,
+          value: pack.folderPath,
+        })),
+        initial: 0,
+      },
+      handlers,
+    );
+    program.llamapack = llamapack;
+    preferences.llamapack = llamapack;
+    await askPostInstallAction();
+    return; // early return - no further questions needed for llamapack projects
+  }
+
   if (!program.framework) {
     if (ciInfo.isCI) {
       program.framework = getPrefOrDefault("framework");
@@ -386,45 +455,7 @@ export const askQuestions = async (
     }
   }
 
-  // Ask for next action after installation
-  if (program.postInstallAction === undefined) {
-    if (ciInfo.isCI) {
-      program.postInstallAction = getPrefOrDefault("postInstallAction");
-    } else {
-      let actionChoices = [
-        {
-          title: "Just generate code (~1 sec)",
-          value: "none",
-        },
-        {
-          title: "Generate code and install dependencies (~2 min)",
-          value: "dependencies",
-        },
-      ];
-
-      const hasOpenAiKey = program.openAiKey || process.env["OPENAI_API_KEY"];
-      if (program.vectorDb === "none" && hasOpenAiKey) {
-        actionChoices.push({
-          title:
-            "Generate code, install dependencies, and run the app (~2 min)",
-          value: "runApp",
-        });
-      }
-
-      const { action } = await prompts(
-        {
-          type: "select",
-          name: "action",
-          message: "How would you like to proceed?",
-          choices: actionChoices,
-          initial: 1,
-        },
-        handlers,
-      );
-
-      program.postInstallAction = action;
-    }
-  }
+  await askPostInstallAction();
 
   // TODO: consider using zod to validate the input (doesn't work like this as not every option is required)
   // templateUISchema.parse(program.ui);
diff --git a/templates/components/sample-projects/llamapack/README-template.md b/templates/components/sample-projects/llamapack/README-template.md
new file mode 100644
index 0000000000000000000000000000000000000000..f669c38c2be9506daea143fd08a6a42adb28f11d
--- /dev/null
+++ b/templates/components/sample-projects/llamapack/README-template.md
@@ -0,0 +1,16 @@
+---
+
+## Quickstart
+
+1. Check the above instructions for setting up your environment and exporting the required environment variables.
+   For example, if you are using bash, you can run the following command to set the OpenAI API key:
+
+```bash
+export OPENAI_API_KEY=your_api_key
+```
+
+2. Run the example:
+
+```bash
+poetry run python example.py
+```
diff --git a/templates/components/sample-projects/llamapack/pyproject.toml b/templates/components/sample-projects/llamapack/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..5e1934ca92f1e703c480931b102eef7ca8a4b09f
--- /dev/null
+++ b/templates/components/sample-projects/llamapack/pyproject.toml
@@ -0,0 +1,16 @@
+[tool.poetry]
+name = "app"
+version = "0.1.0"
+description = "Llama Pack Example"
+authors = ["Marcus Schiesser <mail@marcusschiesser.de>"]
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.11,<3.12"
+llama-index = "^0.9.19"
+python-dotenv = "^1.0.0"
+
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"