{ "dataset_revision": "d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46", "task_name": "SprintDuplicateQuestions", "mteb_version": "1.36.1", "scores": { "test": [ { "similarity_accuracy": 0.998, "similarity_accuracy_threshold": 0.71084, "similarity_f1": 0.897462, "similarity_f1_threshold": 0.71084, "similarity_precision": 0.91134, "similarity_recall": 0.884, "similarity_ap": 0.94322, "cosine_accuracy": 0.998, "cosine_accuracy_threshold": 0.71084, "cosine_f1": 0.897462, "cosine_f1_threshold": 0.71084, "cosine_precision": 0.91134, "cosine_recall": 0.884, "cosine_ap": 0.94322, "manhattan_accuracy": 0.997941, "manhattan_accuracy_threshold": 16.413431, "manhattan_f1": 0.896245, "manhattan_f1_threshold": 17.051424, "manhattan_precision": 0.885742, "manhattan_recall": 0.907, "manhattan_ap": 0.942284, "euclidean_accuracy": 0.998, "euclidean_accuracy_threshold": 0.760474, "euclidean_f1": 0.897462, "euclidean_f1_threshold": 0.760474, "euclidean_precision": 0.91134, "euclidean_recall": 0.884, "euclidean_ap": 0.94322, "dot_accuracy": 0.998, "dot_accuracy_threshold": 0.71084, "dot_f1": 0.897462, "dot_f1_threshold": 0.71084, "dot_precision": 0.91134, "dot_recall": 0.884, "dot_ap": 0.94322, "max_accuracy": 0.998, "max_f1": 0.897462, "max_precision": 0.91134, "max_recall": 0.907, "max_ap": 0.94322, "main_score": 0.94322, "hf_subset": "default", "languages": [ "eng-Latn" ] } ] }, "evaluation_time": 12.96372365951538, "kg_co2_emissions": null }