Upload app.py

app.py CHANGED
@@ -8,12 +8,12 @@ try:
     from transformers import ClapModel, ClapProcessor
     CLAP_AVAILABLE = True
     CLAP_METHOD = "transformers"
-except ImportError:
+except ImportError as e1:
     try:
         import laion_clap
         CLAP_AVAILABLE = True
         CLAP_METHOD = "laion"
-    except ImportError:
+    except ImportError as e2:
         CLAP_AVAILABLE = False
         CLAP_METHOD = None
 import torch
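For context, this hunk only renames the handlers; nothing in the shown hunks reads `e1` or `e2` yet. A minimal standalone sketch of the fallback chain as it reads after this commit, with the captured exceptions surfaced via `print` purely for illustration:

```python
# Standalone sketch of the CLAP import fallback after this commit.
try:
    from transformers import ClapModel, ClapProcessor  # preferred backend
    CLAP_AVAILABLE = True
    CLAP_METHOD = "transformers"
except ImportError as e1:
    try:
        import laion_clap  # fallback backend
        CLAP_AVAILABLE = True
        CLAP_METHOD = "laion"
    except ImportError as e2:
        CLAP_AVAILABLE = False
        CLAP_METHOD = None
        # e1/e2 are unused in the diff itself; logging them here is illustrative only.
        print(f"CLAP unavailable: transformers: {e1!r}; laion_clap: {e2!r}")
```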
@@ -38,36 +38,41 @@ logger = logging.getLogger(__name__)
 
 app = FastAPI(title="CLIP Service", version="1.0.0")
 
+# Log CLAP availability after logger is initialized
+logger.info(f"CLAP availability: {CLAP_AVAILABLE}, method: {CLAP_METHOD}")
+
 class CLIPService:
     def __init__(self):
         logger.info("Loading CLIP model...")
+        self.clap_model = None
+        self.clap_processor = None
+
         try:
             # Use CPU for Hugging Face free tier
             self.device = "cuda" if torch.cuda.is_available() else "cpu"
             logger.info(f"Using device: {self.device}")
 
             # Load CLIP model with explicit cache directory
+            logger.info("Loading CLIP model from HuggingFace...")
             self.clip_model = CLIPModel.from_pretrained(
                 "openai/clip-vit-large-patch14",
                 cache_dir=cache_dir,
                 local_files_only=False
             ).to(self.device)
 
+            logger.info("Loading CLIP processor...")
             self.clip_processor = CLIPProcessor.from_pretrained(
                 "openai/clip-vit-large-patch14",
                 cache_dir=cache_dir,
                 local_files_only=False
             )
 
-            # Initialize CLAP model placeholders (loaded on demand)
-            self.clap_model = None
-            self.clap_processor = None
-
             logger.info(f"CLIP model loaded successfully on {self.device}")
 
         except Exception as e:
             logger.error(f"Failed to load CLIP model: {str(e)}")
-
+            logger.error(f"Error type: {type(e).__name__}")
+            raise RuntimeError(f"CLIP model loading failed: {str(e)}")
 
     def _load_clap_model(self):
         """Load CLAP model on demand"""
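Moving the `self.clap_model`/`self.clap_processor` assignments ahead of the `try` block guarantees those attributes exist even when CLIP loading raises, so `_load_clap_model` can safely test them later. The body of `_load_clap_model` is not shown in this diff; a hypothetical sketch of the on-demand pattern its docstring names, assuming the transformers path and the `laion/clap-htsat-unfused` checkpoint:

```python
def _load_clap_model(self):
    """Load CLAP model on demand (hypothetical sketch; real body not in this diff)."""
    if self.clap_model is not None:
        return  # already loaded
    if not CLAP_AVAILABLE:
        raise RuntimeError("No CLAP backend installed")
    if CLAP_METHOD == "transformers":
        # Checkpoint name is an assumption for illustration
        self.clap_model = ClapModel.from_pretrained(
            "laion/clap-htsat-unfused", cache_dir=cache_dir
        ).to(self.device)
        self.clap_processor = ClapProcessor.from_pretrained(
            "laion/clap-htsat-unfused", cache_dir=cache_dir
        )
```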
@@ -297,6 +302,7 @@ try:
     logger.info("CLIP service initialized successfully!")
 except Exception as e:
     logger.error(f"Failed to initialize CLIP service: {str(e)}")
+    logger.error(f"Error details: {type(e).__name__}: {str(e)}")
     # For now, we'll let the app start but service calls will fail gracefully
     clip_service = None
 
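The final hunk keeps the app alive when initialization fails, leaving `clip_service = None`. The endpoint bodies are not part of this diff, but a minimal sketch of what "fail gracefully" typically means under that pattern, with a hypothetical `/embed` route and `embed_text` method:

```python
from fastapi import HTTPException

@app.post("/embed")  # route and method names are assumptions for illustration
def embed(text: str):
    if clip_service is None:
        # Startup initialization failed; degrade with an explicit 503
        raise HTTPException(status_code=503, detail="CLIP service unavailable")
    return {"embedding": clip_service.embed_text(text)}
```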