Update app.py
app.py CHANGED
@@ -125,8 +125,8 @@ pipe.to(device=DEVICE)
 
 from transformers import AutoProcessor, PaliGemmaForConditionalGeneration
 
-pali = PaliGemmaForConditionalGeneration.from_pretrained('google/paligemma-3b-
-processor = AutoProcessor.from_pretrained('google/paligemma-3b-
+pali = PaliGemmaForConditionalGeneration.from_pretrained('google/paligemma-3b-mix-224', torch_dtype=dtype).eval().to('cuda')
+processor = AutoProcessor.from_pretrained('google/paligemma-3b-mix-224')
 
 #pali = torch.compile(pali)
 
@@ -162,7 +162,7 @@ def generate_pali(n_embs):
         decoded = processor.decode(generation[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
         descs += f'Description: {decoded}\n'
     else:
-        prompt = f'en {descs} Describe a new image that is similar.'
+        prompt = f'en {descs} Describe a new image that is similar. Description:'
         model_inputs = processor(text=prompt, images=torch.zeros(1, 3, 224, 224), return_tensors="pt")
         input_len = model_inputs["input_ids"].shape[-1]
         input_embeds = to_wanted_embs(emb,
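For context, here is a minimal, self-contained sketch of the captioning path this commit touches: loading google/paligemma-3b-mix-224, building the few-shot prompt around a placeholder image, and decoding only the newly generated tokens. It assumes bfloat16 weights on a CUDA device and an illustrative max_new_tokens; the Space's own dtype, DEVICE, emb, and its to_wanted_embs helper (which appears to swap the placeholder image's embedding slots for the Space's precomputed embeddings before generation) are not reproduced here. The trailing 'Description:' added to the prompt presumably cues the model to continue in the same 'Description: ...' format as the earlier few-shot examples.

# Sketch only: standalone approximation of the updated PaliGemma path.
import torch
from transformers import AutoProcessor, PaliGemmaForConditionalGeneration

dtype = torch.bfloat16  # assumption; the Space defines its own `dtype`
pali = PaliGemmaForConditionalGeneration.from_pretrained(
    'google/paligemma-3b-mix-224', torch_dtype=dtype
).eval().to('cuda')
processor = AutoProcessor.from_pretrained('google/paligemma-3b-mix-224')

# Prior descriptions accumulated by generate_pali(); placeholder text here.
descs = 'Description: a photo of a red bicycle leaning against a wall\n'
prompt = f'en {descs} Describe a new image that is similar. Description:'

# A zero image reserves the image-token slots; in the Space those slots are
# later replaced with its own embeddings via to_wanted_embs().
model_inputs = processor(
    text=prompt, images=torch.zeros(1, 3, 224, 224), return_tensors='pt'
).to('cuda', dtype)  # BatchFeature.to casts only the floating-point tensors
input_len = model_inputs['input_ids'].shape[-1]

with torch.inference_mode():
    generation = pali.generate(**model_inputs, max_new_tokens=32, do_sample=False)

# Drop the prompt (including image tokens) and keep only the new tokens.
decoded = processor.decode(generation[0][input_len:], skip_special_tokens=True)
print(decoded)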