{ "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8", "task_name": "MassiveScenarioClassification", "mteb_version": "1.36.1", "scores": { "test": [ { "accuracy": 0.711903, "f1": 0.710214, "f1_weighted": 0.707184, "scores_per_experiment": [ { "accuracy": 0.71957, "f1": 0.719742, "f1_weighted": 0.71563 }, { "accuracy": 0.737391, "f1": 0.730263, "f1_weighted": 0.733205 }, { "accuracy": 0.722596, "f1": 0.719952, "f1_weighted": 0.720578 }, { "accuracy": 0.712172, "f1": 0.703946, "f1_weighted": 0.708005 }, { "accuracy": 0.710491, "f1": 0.70161, "f1_weighted": 0.701415 }, { "accuracy": 0.691325, "f1": 0.691135, "f1_weighted": 0.681946 }, { "accuracy": 0.702085, "f1": 0.703073, "f1_weighted": 0.697202 }, { "accuracy": 0.691997, "f1": 0.697742, "f1_weighted": 0.690308 }, { "accuracy": 0.711836, "f1": 0.711827, "f1_weighted": 0.707059 }, { "accuracy": 0.71957, "f1": 0.72285, "f1_weighted": 0.716488 } ], "main_score": 0.711903, "hf_subset": "en", "languages": [ "eng-Latn" ] } ] }, "evaluation_time": 3.6203203201293945, "kg_co2_emissions": null }