amber-large/mteb/results/TweetSentimentExtractionClassification.json
{
  "dataset_revision": "d604517c81ca91fe16a244d1248fc021f9ecee7a",
  "task_name": "TweetSentimentExtractionClassification",
  "mteb_version": "1.36.1",
  "scores": {
    "test": [
      {
        "accuracy": 0.617119,
        "f1": 0.618672,
        "f1_weighted": 0.607625,
        "scores_per_experiment": [
          {
            "accuracy": 0.616865,
            "f1": 0.615593,
            "f1_weighted": 0.603738
          },
          {
            "accuracy": 0.610074,
            "f1": 0.613025,
            "f1_weighted": 0.602026
          },
          {
            "accuracy": 0.626486,
            "f1": 0.628066,
            "f1_weighted": 0.617477
          },
          {
            "accuracy": 0.625637,
            "f1": 0.628741,
            "f1_weighted": 0.61942
          },
          {
            "accuracy": 0.641483,
            "f1": 0.643745,
            "f1_weighted": 0.634746
          },
          {
            "accuracy": 0.639219,
            "f1": 0.64386,
            "f1_weighted": 0.634796
          },
          {
            "accuracy": 0.591964,
            "f1": 0.592452,
            "f1_weighted": 0.581805
          },
          {
            "accuracy": 0.63837,
            "f1": 0.641278,
            "f1_weighted": 0.630303
          },
          {
            "accuracy": 0.593096,
            "f1": 0.589381,
            "f1_weighted": 0.57362
          },
          {
            "accuracy": 0.588002,
            "f1": 0.590578,
            "f1_weighted": 0.578322
          }
        ],
        "main_score": 0.617119,
        "hf_subset": "default",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 15.041564702987671,
  "kg_co2_emissions": null
}
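
In this file, the top-level "accuracy", "f1", and "f1_weighted" of the "test" entry are the means over the ten per-experiment runs, and "main_score" repeats the aggregate accuracy. A minimal Python sketch, not part of the result file itself, that reads the JSON back and checks this relationship (it assumes the file above is saved as TweetSentimentExtractionClassification.json in the working directory):

```python
import json
import math
from statistics import mean

# Load the MTEB result file shown above.
with open("TweetSentimentExtractionClassification.json") as f:
    result = json.load(f)

test = result["scores"]["test"][0]
for metric in ("accuracy", "f1", "f1_weighted"):
    per_run = [run[metric] for run in test["scores_per_experiment"]]
    recomputed = mean(per_run)
    # The small tolerance covers the six-decimal rounding of the stored per-run values.
    assert math.isclose(recomputed, test[metric], abs_tol=1e-6)
    print(f"{metric}: reported {test[metric]}, mean of runs {recomputed:.6f}")

# In this file the main score equals the aggregate accuracy.
assert test["main_score"] == test["accuracy"]
```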