Model Card for sourabhdattawad/mfc-xlm-roberta
Usage
Load model directly
# Example: multi-label media-frame classification with mfc-xlm-roberta.
from transformers import AutoTokenizer, AutoModelForSequenceClassification, AutoConfig
import torch

MODEL_ID = "sourabhdattawad/mfc-xlm-roberta"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
config = AutoConfig.from_pretrained(MODEL_ID)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_ID)
model.eval()  # disable dropout so inference is deterministic

news_text = """
Is the World Economy Sliding Into First Recession Since 2009?
The global economy is wobbling and whether it topples over is the big question in financial markets, executive suites and the corridors of power.
"""

encoded_input = tokenizer(news_text, return_tensors="pt", padding=True, truncation=True)

# No gradients are needed at inference time.
with torch.no_grad():
    logits = model(**encoded_input).logits

# Multi-label setup: an independent sigmoid per frame, thresholded at 0.5.
# torch.sigmoid replaces the long-deprecated F.sigmoid.
scores = torch.sigmoid(logits[0]).numpy()
frames = [config.id2label[i] for i in range(len(scores)) if scores[i] > 0.5]
print(frames)
Expected output: ['Economic', 'Capacity and resources']