Skip to content
Snippets Groups Projects
Unverified Commit ead70653 authored by Chandrashekar V.T's avatar Chandrashekar V.T Committed by GitHub
Browse files

Added support to load clip model from local file path (#12577)

parent 41763bd3
No related branches found
No related tags found
No related merge requests found
@@ -7,6 +7,7 @@ from llama_index.core.constants import DEFAULT_EMBED_BATCH_SIZE
from llama_index.core.embeddings.multi_modal_base import MultiModalEmbedding
from llama_index.core.schema import ImageType
from PIL import Image
import os
logger = logging.getLogger(__name__)
@@ -86,7 +87,8 @@ class ClipEmbedding(MultiModalEmbedding):
        try:
            self._device = "cuda" if torch.cuda.is_available() else "cpu"
            is_local_path = os.path.exists(self.model_name)
            if not is_local_path and self.model_name not in AVAILABLE_CLIP_MODELS:
                raise ValueError(
                    f"Model name {self.model_name} is not available in CLIP."
                )
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment