{
    "dataset_revision": "70970daeab8776df92f5ea462b6173c0b46fd2d1",
    "task_name": "TwitterSemEval2015",
    "mteb_version": "1.36.1",
    "scores": {
        "test": [
            {
                "similarity_accuracy": 0.828873,
                "similarity_accuracy_threshold": 0.679403,
                "similarity_f1": 0.603641,
                "similarity_f1_threshold": 0.605738,
                "similarity_precision": 0.558876,
                "similarity_recall": 0.656201,
                "similarity_ap": 0.63522,
                "cosine_accuracy": 0.828873,
                "cosine_accuracy_threshold": 0.679403,
                "cosine_f1": 0.603641,
                "cosine_f1_threshold": 0.605738,
                "cosine_precision": 0.558876,
                "cosine_recall": 0.656201,
                "cosine_ap": 0.63522,
                "manhattan_accuracy": 0.828098,
                "manhattan_accuracy_threshold": 17.39439,
                "manhattan_f1": 0.601751,
                "manhattan_f1_threshold": 19.615566,
                "manhattan_precision": 0.545474,
                "manhattan_recall": 0.670976,
                "manhattan_ap": 0.63421,
                "euclidean_accuracy": 0.828873,
                "euclidean_accuracy_threshold": 0.800746,
                "euclidean_f1": 0.603641,
                "euclidean_f1_threshold": 0.887989,
                "euclidean_precision": 0.558876,
                "euclidean_recall": 0.656201,
                "euclidean_ap": 0.63522,
                "dot_accuracy": 0.828873,
                "dot_accuracy_threshold": 0.679403,
                "dot_f1": 0.603641,
                "dot_f1_threshold": 0.605738,
                "dot_precision": 0.558876,
                "dot_recall": 0.656201,
                "dot_ap": 0.63522,
                "max_accuracy": 0.828873,
                "max_f1": 0.603641,
                "max_precision": 0.558876,
                "max_recall": 0.670976,
                "max_ap": 0.63522,
                "main_score": 0.63522,
                "hf_subset": "default",
                "languages": [
                    "eng-Latn"
                ]
            }
        ]
    },
    "evaluation_time": 9.713029384613037,
    "kg_co2_emissions": null
}