# strandtest / test_fix.py
from transformers import CLIPProcessor, CLIPModel
from PIL import Image
import requests
import io
# Test the fix
model = CLIPModel.from_pretrained("openai/clip-vit-large-patch14")
processor = CLIPProcessor.from_pretrained("openai/clip-vit-large-patch14")
# Download test image
url = "https://xymtmeogzckraglhiuwt.supabase.co/storage/v1/object/public/pins/c1cfd4c9-77a3-4365-b38f-dda173e2a0c5/1750055972401.JPG"
response = requests.get(url)
image = Image.open(io.BytesIO(response.content))
if image.mode != 'RGB':
    image = image.convert('RGB')
# Test the fix: images=[image] instead of images=image
try:
    inputs = processor(images=[image], return_tensors="pt")
    print("✅ SUCCESS: Fix works!")
    print(f"Input shape: {inputs['pixel_values'].shape}")
except Exception as e:
    print(f"❌ FAILED: {e}")