diff --git a/.changeset/two-masks-design.md b/.changeset/two-masks-design.md
new file mode 100644
index 0000000000000000000000000000000000000000..2d3f555c38cb17865fb50ae7c597ee8726cb0f41
--- /dev/null
+++ b/.changeset/two-masks-design.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+Fix import error when the artifact tool is selected
diff --git a/helpers/providers/anthropic.ts b/helpers/providers/anthropic.ts
index db7e4f06554ae4801a4dadc4e9f60091941de566..080ffdeae5bf77384458f68daff65d45bc6c5573 100644
--- a/helpers/providers/anthropic.ts
+++ b/helpers/providers/anthropic.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -70,9 +69,7 @@ export async function askAnthropicQuestions({
     config.apiKey = key || process.env.ANTHROPIC_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/azure.ts b/helpers/providers/azure.ts
index 28250ecee605bf0480d4e5d39b0a949cd2ab5028..8f3a3a71019d7d333c062d1def8036a23ec89593 100644
--- a/helpers/providers/azure.ts
+++ b/helpers/providers/azure.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
 import { questionHandlers } from "../../questions/utils";
@@ -67,9 +66,7 @@ export async function askAzureQuestions({
     },
   };
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/gemini.ts b/helpers/providers/gemini.ts
index 50096bded9c6cc8c729fbaf9ba6825abc26e21ad..65b556c4d6a0887c4b9af5c6bd7b38ce8e1a3ab4 100644
--- a/helpers/providers/gemini.ts
+++ b/helpers/providers/gemini.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -54,9 +53,7 @@ export async function askGeminiQuestions({
     config.apiKey = key || process.env.GOOGLE_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/groq.ts b/helpers/providers/groq.ts
index 33394475fb350eba8dd2e372e60713b5550e36c5..61b82a5dc3cf43f071d8c32a2279023d407ce72f 100644
--- a/helpers/providers/groq.ts
+++ b/helpers/providers/groq.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -110,9 +109,7 @@ export async function askGroqQuestions({
     config.apiKey = key || process.env.GROQ_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const modelChoices = await getAvailableModelChoicesGroq(config.apiKey!);
 
     const { model } = await prompts(
diff --git a/helpers/providers/llmhub.ts b/helpers/providers/llmhub.ts
index 0e4a610ee30515b40b666d474459b39ef03afc0a..531e5e431c489f964b154b172a19e8b197d4b90f 100644
--- a/helpers/providers/llmhub.ts
+++ b/helpers/providers/llmhub.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import got from "got";
 import ora from "ora";
 import { red } from "picocolors";
@@ -80,9 +79,7 @@ export async function askLLMHubQuestions({
     config.apiKey = key || process.env.T_SYSTEMS_LLMHUB_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/mistral.ts b/helpers/providers/mistral.ts
index c040b412db72dd607050bf00d52584e205f23f6a..1b11ae544fa1e4f6474d6412ac03a2b3bc9d1ec9 100644
--- a/helpers/providers/mistral.ts
+++ b/helpers/providers/mistral.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -53,9 +52,7 @@ export async function askMistralQuestions({
     config.apiKey = key || process.env.MISTRAL_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/ollama.ts b/helpers/providers/ollama.ts
index cdcbcce64257696056854866102ccf5ab867f07a..b9c797e0e36380acb5e30df6d081b2eb2dfe43d6 100644
--- a/helpers/providers/ollama.ts
+++ b/helpers/providers/ollama.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import ollama, { type ModelResponse } from "ollama";
 import { red } from "picocolors";
 import prompts from "prompts";
@@ -34,9 +33,7 @@ export async function askOllamaQuestions({
     },
   };
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/openai.ts b/helpers/providers/openai.ts
index 6243f5b56ea71efba0942bac9a2344b6c7a242e8..74f9a42bc85ad910172d0fc83a9f2bdc27d2d686 100644
--- a/helpers/providers/openai.ts
+++ b/helpers/providers/openai.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import got from "got";
 import ora from "ora";
 import { red } from "picocolors";
@@ -54,9 +53,7 @@ export async function askOpenAIQuestions({
     config.apiKey = key || process.env.OPENAI_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/tools.ts b/helpers/tools.ts
index 262e71b1d225a8cfb36a54362f214b163f35b5a9..5199f244d0c42d140d495766ebdac7e067b14be4 100644
--- a/helpers/tools.ts
+++ b/helpers/tools.ts
@@ -170,7 +170,7 @@ For better results, you can specify the region parameter to get results from a s
     dependencies: [
       {
         name: "e2b_code_interpreter",
-        version: "^0.0.11b38",
+        version: "0.0.11b38",
       },
     ],
     supportedFrameworks: ["fastapi", "express", "nextjs"],
diff --git a/questions/index.ts b/questions/index.ts
index 03b984e365fd7100cc1844b24ce69e227bf20803..7de3c9f658022e1424f552d6449d64a0adc2feb6 100644
--- a/questions/index.ts
+++ b/questions/index.ts
@@ -7,7 +7,7 @@ import { QuestionArgs, QuestionResults } from "./types";
 export const askQuestions = async (
   args: QuestionArgs,
 ): Promise<QuestionResults> => {
-  if (ciInfo.isCI) {
+  if (ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1") {
     return await getCIQuestionResults(args);
   } else if (args.pro) {
     // TODO: refactor pro questions to return a result object
diff --git a/templates/components/engines/python/agent/tools/document_generator.py b/templates/components/engines/python/agent/tools/document_generator.py
index 5609f1467ab9dc59ebbf774677b2d462611ab570..b97b92a9fe1cc0db811f5b2edc4656f92603002f 100644
--- a/templates/components/engines/python/agent/tools/document_generator.py
+++ b/templates/components/engines/python/agent/tools/document_generator.py
@@ -105,7 +105,7 @@ class DocumentGenerator:
         Generate HTML content from the original markdown content.
         """
         try:
-            import markdown
+            import markdown  # type: ignore
         except ImportError:
             raise ImportError(
                 "Failed to import required modules. Please install markdown."
diff --git a/templates/components/engines/python/agent/tools/img_gen.py b/templates/components/engines/python/agent/tools/img_gen.py
index 8c2ae7bc042cc04e146f608cfe1cea42dff558fb..17cf2d4ae5265c419561eec2632ab79af9df0545 100644
--- a/templates/components/engines/python/agent/tools/img_gen.py
+++ b/templates/components/engines/python/agent/tools/img_gen.py
@@ -3,7 +3,7 @@ import os
 import uuid
 from typing import Optional
 
-import requests
+import requests  # type: ignore
 from llama_index.core.tools import FunctionTool
 from pydantic import BaseModel, Field
 
diff --git a/templates/components/engines/python/agent/tools/openapi_action.py b/templates/components/engines/python/agent/tools/openapi_action.py
index c19187d2f78d3431beb2ca53d63a0dba002722e6..dbfd3e1da382d49ad401e2330f25df53ffa04d81 100644
--- a/templates/components/engines/python/agent/tools/openapi_action.py
+++ b/templates/components/engines/python/agent/tools/openapi_action.py
@@ -1,4 +1,5 @@
 from typing import Dict, List, Tuple
+
 from llama_index.tools.openapi import OpenAPIToolSpec
 from llama_index.tools.requests import RequestsToolSpec
 
@@ -43,11 +44,12 @@ class OpenAPIActionToolSpec(OpenAPIToolSpec, RequestsToolSpec):
         Returns:
             List[Document]: A list of Document objects.
         """
-        import yaml
         from urllib.parse import urlparse
 
+        import yaml  # type: ignore
+
         if uri.startswith("http"):
-            import requests
+            import requests  # type: ignore
 
             response = requests.get(uri)
             if response.status_code != 200:
diff --git a/templates/components/engines/python/agent/tools/weather.py b/templates/components/engines/python/agent/tools/weather.py
index c8b6f1b4cfd2f53c52e1ba415593313af67506b6..981f3771acf3dacf33e4658ac56cc358ba2b7064 100644
--- a/templates/components/engines/python/agent/tools/weather.py
+++ b/templates/components/engines/python/agent/tools/weather.py
@@ -1,8 +1,9 @@
 """Open Meteo weather map tool spec."""
 
 import logging
-import requests
-import pytz
+
+import pytz  # type: ignore
+import requests  # type: ignore
 from llama_index.core.tools import FunctionTool
 
 logger = logging.getLogger(__name__)
diff --git a/templates/components/routers/python/sandbox.py b/templates/components/routers/python/sandbox.py
index 0b07422ee89136f574a7ced2b08705f1205e04cd..28c0c3f1904add17663f0dfac2aaad524ae19aab 100644
--- a/templates/components/routers/python/sandbox.py
+++ b/templates/components/routers/python/sandbox.py
@@ -20,8 +20,8 @@ from dataclasses import asdict
 from typing import Any, Dict, List, Optional, Union
 
 from app.engine.tools.artifact import CodeArtifact
-from app.engine.utils.file_helper import save_file
-from e2b_code_interpreter import CodeInterpreter, Sandbox
+from app.services.file import FileService
+from e2b_code_interpreter import CodeInterpreter, Sandbox  # type: ignore
 from fastapi import APIRouter, HTTPException, Request
 from pydantic import BaseModel
 
@@ -175,7 +175,7 @@ def _download_cell_results(cell_results: Optional[List]) -> List[Dict[str, str]]
                     base64_data = data
                     buffer = base64.b64decode(base64_data)
                     file_name = f"{uuid.uuid4()}.{ext}"
-                    file_meta = save_file(
+                    file_meta = FileService.save_file(
                         content=buffer,
                         file_name=file_name,
                         save_dir=os.path.join("output", "tools"),
diff --git a/templates/components/vectordbs/python/llamacloud/generate.py b/templates/components/vectordbs/python/llamacloud/generate.py
index 6be271bd5c3cff9056541cd2767e6809f0935e5f..acd28777eea7461fab06a37490f221487da9bad3 100644
--- a/templates/components/vectordbs/python/llamacloud/generate.py
+++ b/templates/components/vectordbs/python/llamacloud/generate.py
@@ -8,7 +8,7 @@ load_dotenv()
 import logging
 
 from app.engine.index import get_client, get_index
-from app.engine.service import LLamaCloudFileService
+from app.engine.service import LLamaCloudFileService  # type: ignore
 from app.settings import init_settings
 from llama_cloud import PipelineType
 from llama_index.core.readers import SimpleDirectoryReader
diff --git a/templates/types/streaming/fastapi/app/api/routers/__init__.py b/templates/types/streaming/fastapi/app/api/routers/__init__.py
index 8c897aa51a407fc27ad7da6b3fc6d75a4b163e07..6c2654e5f3665741864821a27733973976955c8b 100644
--- a/templates/types/streaming/fastapi/app/api/routers/__init__.py
+++ b/templates/types/streaming/fastapi/app/api/routers/__init__.py
@@ -11,7 +11,7 @@ api_router.include_router(file_upload_router, prefix="/chat/upload")
 
 # Dynamically adding additional routers if they exist
 try:
-    from .sandbox import sandbox_router  # noqa: F401
+    from .sandbox import sandbox_router  # type: ignore
 
     api_router.include_router(sandbox_router, prefix="/sandbox")
 except ImportError:
diff --git a/templates/types/streaming/fastapi/app/api/routers/chat_config.py b/templates/types/streaming/fastapi/app/api/routers/chat_config.py
index 495ee99be18e9a4660c7da6a6cc68554b5d7ab1a..e3cacfc93061e1c139b43a4d0974b83f55e17312 100644
--- a/templates/types/streaming/fastapi/app/api/routers/chat_config.py
+++ b/templates/types/streaming/fastapi/app/api/routers/chat_config.py
@@ -12,7 +12,9 @@ logger = logging.getLogger("uvicorn")
 
 def _is_llama_cloud_service_configured():
     try:
-        from app.engine.service import LLamaCloudFileService  # noqa
+        from app.engine.service import (
+            LLamaCloudFileService,  # type: ignore # noqa: F401
+        )
 
         return True
     except ImportError:
@@ -20,7 +22,7 @@ def _is_llama_cloud_service_configured():
 
 
 async def chat_llama_cloud_config():
-    from app.engine.service import LLamaCloudFileService
+    from app.engine.service import LLamaCloudFileService  # type: ignore
 
     if not os.getenv("LLAMA_CLOUD_API_KEY"):
         raise HTTPException(
diff --git a/templates/types/streaming/fastapi/app/api/routers/vercel_response.py b/templates/types/streaming/fastapi/app/api/routers/vercel_response.py
index fc5f03e03a39f6490821f04fc47200dda8af265f..1155f6ba7a84570caa68c1ceb6611f34a9ef9ed3 100644
--- a/templates/types/streaming/fastapi/app/api/routers/vercel_response.py
+++ b/templates/types/streaming/fastapi/app/api/routers/vercel_response.py
@@ -138,7 +138,7 @@ class VercelStreamResponse(StreamingResponse):
     ):
         try:
             # Start background tasks to download documents from LlamaCloud if needed
-            from app.engine.service import LLamaCloudFileService
+            from app.engine.service import LLamaCloudFileService  # type: ignore
 
             LLamaCloudFileService.download_files_from_nodes(
                 source_nodes, background_tasks
diff --git a/templates/types/streaming/fastapi/app/services/file.py b/templates/types/streaming/fastapi/app/services/file.py
index 02e0084414aed4205293a14fae2b8a51499f1e6a..a551ea5f1a79736c5c346ee8c2269d59dcb756dd 100644
--- a/templates/types/streaming/fastapi/app/services/file.py
+++ b/templates/types/streaming/fastapi/app/services/file.py
@@ -241,7 +241,7 @@ class FileService:
         LlamaCloudIndex is a managed index so we can directly use the files.
         """
         try:
-            from app.engine.service import LLamaCloudFileService
+            from app.engine.service import LLamaCloudFileService  # type: ignore
         except ImportError as e:
             raise ValueError("LlamaCloudFileService is not found") from e
 
@@ -287,7 +287,7 @@ def _default_file_loaders_map():
 
 def _get_available_tools() -> Dict[str, List[FunctionTool]]:
     try:
-        from app.engine.tools import ToolFactory
+        from app.engine.tools import ToolFactory  # type: ignore
     except ImportError:
         logger.warning("ToolFactory not found, no tools will be available")
         return {}
diff --git a/templates/types/streaming/fastapi/pyproject.toml b/templates/types/streaming/fastapi/pyproject.toml
index 6a89416076a0a2422b87bc32dc2ec6d71f3bfed5..400991c52789fd35d97ab68945ee250ded1deb11 100644
--- a/templates/types/streaming/fastapi/pyproject.toml
+++ b/templates/types/streaming/fastapi/pyproject.toml
@@ -36,4 +36,8 @@ ignore_missing_imports = true
 follow_imports = "silent"
 implicit_optional = true
 strict_optional = false
-disable_error_code = ["return-value", "import-untyped", "assignment"]
+disable_error_code = ["return-value", "assignment"]
+
+[[tool.mypy.overrides]]
+module = "app.*"
+ignore_missing_imports = false