Fix: Ensure model is moved to same device as inputs in example code
#14 opened by anon-repair-bot

README.md CHANGED
@@ -299,7 +299,7 @@ device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
 
 model_name = "MoritzLaurer/DeBERTa-v3-base-mnli-fever-anli"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModelForSequenceClassification.from_pretrained(model_name)
+model = AutoModelForSequenceClassification.from_pretrained(model_name).to(device)
 
 premise = "I first thought that I liked the movie, but upon second thought it was actually disappointing."
 hypothesis = "The movie was good."
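For context, a minimal sketch of how the example reads with the fix applied. The tokenization and inference lines that follow this hunk are not part of the diff, so everything after `hypothesis = ...` below is an assumption about how the README example continues; the point is simply that the input tensors end up on `device`, which is why the model must be moved there as well.

```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")

model_name = "MoritzLaurer/DeBERTa-v3-base-mnli-fever-anli"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# The fix: load the model onto the same device the inputs will be on.
model = AutoModelForSequenceClassification.from_pretrained(model_name).to(device)

premise = "I first thought that I liked the movie, but upon second thought it was actually disappointing."
hypothesis = "The movie was good."

# Assumed continuation of the README example: tokenize the pair and move the
# tensors to `device`. Without the .to(device) on the model above, this forward
# pass raises a device-mismatch RuntimeError on CUDA machines.
inputs = tokenizer(premise, hypothesis, truncation=True, return_tensors="pt").to(device)
with torch.no_grad():
    logits = model(**inputs).logits

# Map probabilities to the model's own label names rather than hardcoding them.
probs = torch.softmax(logits[0], dim=-1)
print({model.config.id2label[i]: round(p.item(), 3) for i, p in enumerate(probs)})
```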