Add results for voyage model (#151)
* add results for voyage
* renamed model folder
* remove

This view is limited to 50 files because it contains too many changes; see the raw diff for the remaining files.
- pyproject.toml +7 -1
- results/voyageai__voyage-2/1/AmazonCounterfactualClassification.json +95 -0
- results/voyageai__voyage-2/1/AmazonReviewsClassification.json +73 -0
- results/voyageai__voyage-2/1/ArXivHierarchicalClusteringP2P.json +46 -0
- results/voyageai__voyage-2/1/ArXivHierarchicalClusteringS2S.json +46 -0
- results/voyageai__voyage-2/1/BIOSSES.json +26 -0
- results/voyageai__voyage-2/1/Banking77Classification.json +73 -0
- results/voyageai__voyage-2/1/BiorxivClusteringP2P.v2.json +34 -0
- results/voyageai__voyage-2/1/MTOPDomainClassification.json +73 -0
- results/voyageai__voyage-2/1/MassiveIntentClassification.json +73 -0
- results/voyageai__voyage-2/1/MassiveScenarioClassification.json +73 -0
- results/voyageai__voyage-2/1/MedrxivClusteringP2P.v2.json +34 -0
- results/voyageai__voyage-2/1/MedrxivClusteringS2S.v2.json +34 -0
- results/voyageai__voyage-2/1/SICK-R.json +26 -0
- results/voyageai__voyage-2/1/STS12.json +26 -0
- results/voyageai__voyage-2/1/STS13.json +26 -0
- results/voyageai__voyage-2/1/STS14.json +26 -0
- results/voyageai__voyage-2/1/STS15.json +26 -0
- results/voyageai__voyage-2/1/STS17.json +26 -0
- results/voyageai__voyage-2/1/STS22.v2.json +26 -0
- results/voyageai__voyage-2/1/STSBenchmark.json +26 -0
- results/voyageai__voyage-2/1/SprintDuplicateQuestions.json +58 -0
- results/voyageai__voyage-2/1/StackExchangeClustering.v2.json +34 -0
- results/voyageai__voyage-2/1/StackExchangeClusteringP2P.v2.json +34 -0
- results/voyageai__voyage-2/1/SummEvalSummarization.v2.json +24 -0
- results/voyageai__voyage-2/1/ToxicConversationsClassification.json +95 -0
- results/voyageai__voyage-2/1/TweetSentimentExtractionClassification.json +73 -0
- results/voyageai__voyage-2/1/TwentyNewsgroupsClustering.v2.json +34 -0
- results/voyageai__voyage-2/1/TwitterSemEval2015.json +58 -0
- results/voyageai__voyage-2/1/TwitterURLCorpus.json +58 -0
- results/voyageai__voyage-2/1/model_meta.json +1 -0
- results/voyageai__voyage-3-lite/1/STS22.json +26 -0
- results/voyageai__voyage-3-lite/1/model_meta.json +1 -1
- results/voyageai__voyage-3-m-exp/1/ArXivHierarchicalClusteringP2P.json +46 -0
- results/voyageai__voyage-3-m-exp/1/ArXivHierarchicalClusteringS2S.json +46 -0
- results/voyageai__voyage-3-m-exp/1/BiorxivClusteringP2P.v2.json +34 -0
- results/voyageai__voyage-3-m-exp/1/MedrxivClusteringP2P.v2.json +34 -0
- results/voyageai__voyage-3-m-exp/1/MedrxivClusteringS2S.v2.json +34 -0
- results/voyageai__voyage-3-m-exp/1/STS22.v2.json +26 -0
- results/voyageai__voyage-3-m-exp/1/StackExchangeClustering.v2.json +34 -0
- results/voyageai__voyage-3-m-exp/1/StackExchangeClusteringP2P.v2.json +34 -0
- results/voyageai__voyage-3-m-exp/1/SummEvalSummarization.v2.json +24 -0
- results/voyageai__voyage-3-m-exp/1/TwentyNewsgroupsClustering.v2.json +34 -0
- results/voyageai__voyage-3-m-exp/1/model_meta.json +1 -1
- results/voyageai__voyage-3/1/STS22.json +26 -0
- results/voyageai__voyage-3/1/model_meta.json +1 -1
- results/voyageai__voyage-code-2/1/AmazonCounterfactualClassification.json +95 -0
- results/voyageai__voyage-code-2/1/ArXivHierarchicalClusteringP2P.json +46 -0
- results/voyageai__voyage-code-2/1/ArXivHierarchicalClusteringS2S.json +46 -0
- results/voyageai__voyage-code-2/1/BIOSSES.json +26 -0
pyproject.toml
CHANGED
@@ -3,5 +3,11 @@ name = "results"
 version = "0.1.0"
 description = "The result repository for mteb"
 readme = "README.md"
-requires-python = ">=3.9"
+requires-python = ">=3.9,<3.13" # pytrec-eval-terrier does not compile for 3.13
 dependencies = ["mteb[dev]>=1.13.0"]
+
+[dependency-groups]
+dev = [
+    "ipykernel>=6.29.5",
+    "pip>=25.0.1",
+]
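For context, these result files are produced by running mteb against the Voyage API. A minimal reproduction sketch, assuming the mteb[dev] dependency declared above and a Voyage API key in the environment; the exact loader and API surface may differ between mteb releases, and the task and model names below are simply taken from this PR:

import mteb

# Hypothetical reproduction sketch; not part of this PR.
model = mteb.get_model("voyageai/voyage-2")         # resolved through the VoyageWrapper loader, needs a Voyage API key
tasks = mteb.get_tasks(tasks=["BIOSSES", "STS12"])  # any of the tasks added in this PR
evaluation = mteb.MTEB(tasks=tasks)
evaluation.run(model, output_folder="results")      # writes results/voyageai__voyage-2/<revision>/<TaskName>.json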
results/voyageai__voyage-2/1/AmazonCounterfactualClassification.json
ADDED
@@ -0,0 +1,95 @@
{
  "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
  "task_name": "AmazonCounterfactualClassification",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "accuracy": 0.695522, "f1": 0.634324, "f1_weighted": 0.724555, "ap": 0.321044, "ap_weighted": 0.321044,
    "scores_per_experiment": [
      {"accuracy": 0.649254, "f1": 0.583037, "f1_weighted": 0.684222, "ap": 0.268174, "ap_weighted": 0.268174},
      {"accuracy": 0.753731, "f1": 0.688285, "f1_weighted": 0.775263, "ap": 0.370247, "ap_weighted": 0.370247},
      {"accuracy": 0.58806, "f1": 0.552854, "f1_weighted": 0.629258, "ap": 0.272517, "ap_weighted": 0.272517},
      {"accuracy": 0.695522, "f1": 0.633294, "f1_weighted": 0.725284, "ap": 0.315682, "ap_weighted": 0.315682},
      {"accuracy": 0.707463, "f1": 0.651741, "f1_weighted": 0.736571, "ap": 0.340628, "ap_weighted": 0.340628},
      {"accuracy": 0.726866, "f1": 0.670555, "f1_weighted": 0.753497, "ap": 0.360029, "ap_weighted": 0.360029},
      {"accuracy": 0.753731, "f1": 0.671642, "f1_weighted": 0.77162, "ap": 0.339013, "ap_weighted": 0.339013},
      {"accuracy": 0.732836, "f1": 0.66528, "f1_weighted": 0.756851, "ap": 0.343198, "ap_weighted": 0.343198},
      {"accuracy": 0.702985, "f1": 0.645907, "f1_weighted": 0.732479, "ap": 0.333184, "ap_weighted": 0.333184},
      {"accuracy": 0.644776, "f1": 0.580651, "f1_weighted": 0.68051, "ap": 0.267764, "ap_weighted": 0.267764}
    ],
    "main_score": 0.695522, "hf_subset": "en", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 22.0602068901062,
  "kg_co2_emissions": null
}
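Each per-task file is self-describing: the single "test" entry carries the aggregate metrics plus a scores_per_experiment list with one object per run, and the aggregate above is the mean across those runs. A short check using only the standard library, with the path as added in this PR:

import json
from statistics import mean

# Path as added in this PR; adjust if the repository is checked out elsewhere.
path = "results/voyageai__voyage-2/1/AmazonCounterfactualClassification.json"
with open(path) as f:
    result = json.load(f)

split = result["scores"]["test"][0]
per_run_accuracy = [run["accuracy"] for run in split["scores_per_experiment"]]
print(round(mean(per_run_accuracy), 6))  # 0.695522, the reported aggregate "accuracy"
print(split["main_score"])               # 0.695522 (accuracy is the main score for this task)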
results/voyageai__voyage-2/1/AmazonReviewsClassification.json
ADDED
@@ -0,0 +1,73 @@
{
  "dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d",
  "task_name": "AmazonReviewsClassification",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "accuracy": 0.42068, "f1": 0.406899, "f1_weighted": 0.406899,
    "scores_per_experiment": [
      {"accuracy": 0.444, "f1": 0.424666, "f1_weighted": 0.424666},
      {"accuracy": 0.4292, "f1": 0.407158, "f1_weighted": 0.407158},
      {"accuracy": 0.4102, "f1": 0.402591, "f1_weighted": 0.402591},
      {"accuracy": 0.4322, "f1": 0.418908, "f1_weighted": 0.418908},
      {"accuracy": 0.4268, "f1": 0.40691, "f1_weighted": 0.40691},
      {"accuracy": 0.4072, "f1": 0.398244, "f1_weighted": 0.398244},
      {"accuracy": 0.4012, "f1": 0.391319, "f1_weighted": 0.391319},
      {"accuracy": 0.4362, "f1": 0.439332, "f1_weighted": 0.439332},
      {"accuracy": 0.4164, "f1": 0.396183, "f1_weighted": 0.396183},
      {"accuracy": 0.4034, "f1": 0.383678, "f1_weighted": 0.383678}
    ],
    "main_score": 0.42068, "hf_subset": "en", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 283.1336479187012,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/ArXivHierarchicalClusteringP2P.json
ADDED
@@ -0,0 +1,46 @@
{
  "dataset_revision": "0bbdb47bcbe3a90093699aefeed338a0f28a7ee8",
  "task_name": "ArXivHierarchicalClusteringP2P",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {
      "Level 0": [0.562404, 0.599307, 0.600505, 0.604632, 0.542921, 0.552999, 0.533494, 0.590038, 0.610404, 0.552781],
      "Level 1": [0.562446, 0.574968, 0.571951, 0.576544, 0.563843, 0.55949, 0.597775, 0.576352, 0.536795, 0.626181]
    },
    "v_measure": 0.574792, "v_measure_std": 0.02512,
    "main_score": 0.574792, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 1000.5131340026855,
  "kg_co2_emissions": null
}
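For the clustering tasks, v_measures holds ten runs per hierarchy level and the reported v_measure is the mean over all listed runs (both Level 0 and Level 1 here). A quick standard-library check against the file above:

import json
from statistics import mean

path = "results/voyageai__voyage-2/1/ArXivHierarchicalClusteringP2P.json"
with open(path) as f:
    result = json.load(f)

entry = result["scores"]["test"][0]
runs = [v for level in entry["v_measures"].values() for v in level]
print(len(runs))                                    # 20 runs: 10 per hierarchy level
print(abs(mean(runs) - entry["v_measure"]) < 1e-6)  # True: v_measure is the mean of all runs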
results/voyageai__voyage-2/1/ArXivHierarchicalClusteringS2S.json
ADDED
@@ -0,0 +1,46 @@
{
  "dataset_revision": "b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3",
  "task_name": "ArXivHierarchicalClusteringS2S",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {
      "Level 0": [0.573292, 0.530668, 0.498832, 0.565711, 0.522439, 0.476578, 0.58589, 0.582446, 0.512015, 0.521227],
      "Level 1": [0.549254, 0.563597, 0.557765, 0.588363, 0.55805, 0.545452, 0.528399, 0.574015, 0.520908, 0.560871]
    },
    "v_measure": 0.545789, "v_measure_std": 0.029999,
    "main_score": 0.545789, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 4.2193968296051025,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/BIOSSES.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "d3fb88f8f02e40887cd149695127462bbcf29b4a",
  "task_name": "BIOSSES",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.885357, "spearman": 0.87346, "cosine_pearson": 0.885357, "cosine_spearman": 0.87346,
    "manhattan_pearson": 0.876676, "manhattan_spearman": 0.874797, "euclidean_pearson": 0.878306, "euclidean_spearman": 0.87346,
    "main_score": 0.87346, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 2.4972140789031982,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/Banking77Classification.json
ADDED
@@ -0,0 +1,73 @@
{
  "dataset_revision": "0fd18e25b25c072e09e0d92ab615fda904d66300",
  "task_name": "Banking77Classification",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "accuracy": 0.776234, "f1": 0.765069, "f1_weighted": 0.765069,
    "scores_per_experiment": [
      {"accuracy": 0.786688, "f1": 0.775862, "f1_weighted": 0.775862},
      {"accuracy": 0.787662, "f1": 0.776575, "f1_weighted": 0.776575},
      {"accuracy": 0.793831, "f1": 0.782625, "f1_weighted": 0.782625},
      {"accuracy": 0.771429, "f1": 0.764319, "f1_weighted": 0.764319},
      {"accuracy": 0.760065, "f1": 0.744945, "f1_weighted": 0.744945},
      {"accuracy": 0.786039, "f1": 0.779857, "f1_weighted": 0.779857},
      {"accuracy": 0.765909, "f1": 0.75367, "f1_weighted": 0.75367},
      {"accuracy": 0.766558, "f1": 0.754385, "f1_weighted": 0.754385},
      {"accuracy": 0.777273, "f1": 0.762788, "f1_weighted": 0.762788},
      {"accuracy": 0.766883, "f1": 0.755661, "f1_weighted": 0.755661}
    ],
    "main_score": 0.776234, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 95.84986591339111,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/BiorxivClusteringP2P.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "f5dbc242e11dd8e24def4c4268607a49e02946dc",
  "task_name": "BiorxivClusteringP2P.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.407808, 0.426368, 0.404679, 0.429346, 0.405718, 0.444299, 0.401083, 0.423941, 0.405699, 0.41511]},
    "v_measure": 0.416405, "v_measure_std": 0.013335,
    "main_score": 0.416405, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 134.63581013679504,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/MTOPDomainClassification.json
ADDED
@@ -0,0 +1,73 @@
{
  "dataset_revision": "d80d48c1eb48d3562165c59d59d0034df9fff0bf",
  "task_name": "MTOPDomainClassification",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "accuracy": 0.933744, "f1": 0.932139, "f1_weighted": 0.933551,
    "scores_per_experiment": [
      {"accuracy": 0.932513, "f1": 0.929492, "f1_weighted": 0.932264},
      {"accuracy": 0.938668, "f1": 0.9373, "f1_weighted": 0.93843},
      {"accuracy": 0.930005, "f1": 0.929601, "f1_weighted": 0.929697},
      {"accuracy": 0.933881, "f1": 0.931776, "f1_weighted": 0.933786},
      {"accuracy": 0.938896, "f1": 0.934557, "f1_weighted": 0.939055},
      {"accuracy": 0.927041, "f1": 0.93017, "f1_weighted": 0.926561},
      {"accuracy": 0.917009, "f1": 0.916665, "f1_weighted": 0.916508},
      {"accuracy": 0.944596, "f1": 0.941677, "f1_weighted": 0.944425},
      {"accuracy": 0.926585, "f1": 0.925577, "f1_weighted": 0.926512},
      {"accuracy": 0.948244, "f1": 0.944576, "f1_weighted": 0.948275}
    ],
    "main_score": 0.933744, "hf_subset": "en", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 45.53554368019104,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/MassiveIntentClassification.json
ADDED
@@ -0,0 +1,73 @@
{
  "dataset_revision": "4672e20407010da34463acc759c162ca9734bca6",
  "task_name": "MassiveIntentClassification",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "accuracy": 0.668964, "f1": 0.628387, "f1_weighted": 0.660862,
    "scores_per_experiment": [
      {"accuracy": 0.688635, "f1": 0.638339, "f1_weighted": 0.67929},
      {"accuracy": 0.68729, "f1": 0.646299, "f1_weighted": 0.686165},
      {"accuracy": 0.647949, "f1": 0.61021, "f1_weighted": 0.638677},
      {"accuracy": 0.679892, "f1": 0.636426, "f1_weighted": 0.675901},
      {"accuracy": 0.659045, "f1": 0.615929, "f1_weighted": 0.650105},
      {"accuracy": 0.647276, "f1": 0.623844, "f1_weighted": 0.631997},
      {"accuracy": 0.685272, "f1": 0.644456, "f1_weighted": 0.676451},
      {"accuracy": 0.66577, "f1": 0.625479, "f1_weighted": 0.660331},
      {"accuracy": 0.647613, "f1": 0.598575, "f1_weighted": 0.627649},
      {"accuracy": 0.680901, "f1": 0.644315, "f1_weighted": 0.682058}
    ],
    "main_score": 0.668964, "hf_subset": "en", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 55.509772062301636,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/MassiveScenarioClassification.json
ADDED
@@ -0,0 +1,73 @@
{
  "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8",
  "task_name": "MassiveScenarioClassification",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "accuracy": 0.740282, "f1": 0.73159, "f1_weighted": 0.737893,
    "scores_per_experiment": [
      {"accuracy": 0.770679, "f1": 0.763439, "f1_weighted": 0.770041},
      {"accuracy": 0.740753, "f1": 0.733912, "f1_weighted": 0.73807},
      {"accuracy": 0.747142, "f1": 0.732102, "f1_weighted": 0.740348},
      {"accuracy": 0.754876, "f1": 0.74453, "f1_weighted": 0.756729},
      {"accuracy": 0.726295, "f1": 0.719493, "f1_weighted": 0.718786},
      {"accuracy": 0.736046, "f1": 0.725717, "f1_weighted": 0.730874},
      {"accuracy": 0.730666, "f1": 0.722118, "f1_weighted": 0.728823},
      {"accuracy": 0.726967, "f1": 0.718518, "f1_weighted": 0.728441},
      {"accuracy": 0.733356, "f1": 0.72568, "f1_weighted": 0.731405},
      {"accuracy": 0.736046, "f1": 0.730393, "f1_weighted": 0.735409}
    ],
    "main_score": 0.740282, "hf_subset": "en", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 37.93454885482788,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/MedrxivClusteringP2P.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "e7a26af6f3ae46b30dde8737f02c07b1505bcc73",
  "task_name": "MedrxivClusteringP2P.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.375416, 0.351325, 0.36872, 0.360263, 0.356931, 0.364946, 0.366107, 0.36503, 0.360284, 0.362478]},
    "v_measure": 0.36315, "v_measure_std": 0.006265,
    "main_score": 0.36315, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 144.58351016044617,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/MedrxivClusteringS2S.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "35191c8c0dca72d8ff3efcd72aa802307d469663",
  "task_name": "MedrxivClusteringS2S.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.353632, 0.349945, 0.356038, 0.346906, 0.353443, 0.339642, 0.344365, 0.340969, 0.361607, 0.342834]},
    "v_measure": 0.348938, "v_measure_std": 0.00683,
    "main_score": 0.348938, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 19.437555074691772,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/SICK-R.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "20a6d6f312dd54037fe07a32d58e5e168867909d",
  "task_name": "SICK-R",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.846713, "spearman": 0.794471, "cosine_pearson": 0.846713, "cosine_spearman": 0.794471,
    "manhattan_pearson": 0.828763, "manhattan_spearman": 0.794455, "euclidean_pearson": 0.828946, "euclidean_spearman": 0.794471,
    "main_score": 0.794471, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 136.5992350578308,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/STS12.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "a0d554a64d88156834ff5ae9920b964011b16384",
  "task_name": "STS12",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.744855, "spearman": 0.710071, "cosine_pearson": 0.744855, "cosine_spearman": 0.710071,
    "manhattan_pearson": 0.714259, "manhattan_spearman": 0.710338, "euclidean_pearson": 0.71424, "euclidean_spearman": 0.710071,
    "main_score": 0.710071, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 46.08036422729492,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/STS13.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "7e90230a92c190f1bf69ae9002b8cea547a64cca",
  "task_name": "STS13",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.776081, "spearman": 0.805375, "cosine_pearson": 0.776081, "cosine_spearman": 0.805375,
    "manhattan_pearson": 0.80085, "manhattan_spearman": 0.805385, "euclidean_pearson": 0.80085, "euclidean_spearman": 0.805375,
    "main_score": 0.805375, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 21.828990936279297,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/STS14.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "6031580fec1f6af667f0bd2da0a551cf4f0b2375",
  "task_name": "STS14",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.773071, "spearman": 0.74775, "cosine_pearson": 0.773071, "cosine_spearman": 0.74775,
    "manhattan_pearson": 0.76706, "manhattan_spearman": 0.747219, "euclidean_pearson": 0.767439, "euclidean_spearman": 0.74775,
    "main_score": 0.74775, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 55.173203229904175,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/STS15.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "ae752c7c21bf194d8b67fd573edf7ae58183cbe3",
  "task_name": "STS15",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.818378, "spearman": 0.836444, "cosine_pearson": 0.818378, "cosine_spearman": 0.836444,
    "manhattan_pearson": 0.832263, "manhattan_spearman": 0.836173, "euclidean_pearson": 0.832647, "euclidean_spearman": 0.836444,
    "main_score": 0.836444, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 51.47369694709778,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/STS17.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "faeb762787bd10488a50c8b5be4a3b82e411949c",
  "task_name": "STS17",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.87149, "spearman": 0.891938, "cosine_pearson": 0.87149, "cosine_spearman": 0.891938,
    "manhattan_pearson": 0.890632, "manhattan_spearman": 0.891213, "euclidean_pearson": 0.891376, "euclidean_spearman": 0.891938,
    "main_score": 0.891938, "hf_subset": "en-en", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 3.96331524848938,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/STS22.v2.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "d31f33a128469b20e357535c39b82fb3c3f6f2bd",
  "task_name": "STS22.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.654151, "spearman": 0.6539, "cosine_pearson": 0.654151, "cosine_spearman": 0.6539,
    "manhattan_pearson": 0.665552, "manhattan_spearman": 0.657051, "euclidean_pearson": 0.665394, "euclidean_spearman": 0.6539,
    "main_score": 0.6539, "hf_subset": "en", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 67.85761308670044,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/STSBenchmark.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "b0fddb56ed78048fa8b90373c8a3cfc37b684831",
  "task_name": "STSBenchmark",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.820552, "spearman": 0.830537, "cosine_pearson": 0.820552, "cosine_spearman": 0.830537,
    "manhattan_pearson": 0.832017, "manhattan_spearman": 0.829835, "euclidean_pearson": 0.832651, "euclidean_spearman": 0.830537,
    "main_score": 0.830537, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 20.79955816268921,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/SprintDuplicateQuestions.json
ADDED
@@ -0,0 +1,58 @@
{
  "dataset_revision": "d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46",
  "task_name": "SprintDuplicateQuestions",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "similarity_accuracy": 0.997436, "similarity_accuracy_threshold": 0.927648, "similarity_f1": 0.869695, "similarity_f1_threshold": 0.925368, "similarity_precision": 0.868395, "similarity_recall": 0.871, "similarity_ap": 0.932768,
    "cosine_accuracy": 0.997436, "cosine_accuracy_threshold": 0.927648, "cosine_f1": 0.869695, "cosine_f1_threshold": 0.925368, "cosine_precision": 0.868395, "cosine_recall": 0.871, "cosine_ap": 0.932768,
    "manhattan_accuracy": 0.997436, "manhattan_accuracy_threshold": 9.696098, "manhattan_f1": 0.869781, "manhattan_f1_threshold": 9.898817, "manhattan_precision": 0.864625, "manhattan_recall": 0.875, "manhattan_ap": 0.932547,
    "euclidean_accuracy": 0.997436, "euclidean_accuracy_threshold": 0.3804, "euclidean_f1": 0.869695, "euclidean_f1_threshold": 0.386348, "euclidean_precision": 0.868395, "euclidean_recall": 0.871, "euclidean_ap": 0.932768,
    "dot_accuracy": 0.997436, "dot_accuracy_threshold": 0.927648, "dot_f1": 0.869695, "dot_f1_threshold": 0.925368, "dot_precision": 0.868395, "dot_recall": 0.871, "dot_ap": 0.932768,
    "max_accuracy": 0.997436, "max_f1": 0.869781, "max_precision": 0.868395, "max_recall": 0.875, "max_ap": 0.932768,
    "main_score": 0.932768, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 69.74615001678467,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/StackExchangeClustering.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "6cbc1f7b2bc0622f2e39d2c77fa502909748c259",
  "task_name": "StackExchangeClustering.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.559069, 0.548696, 0.581781, 0.567124, 0.5694, 0.546069, 0.553066, 0.554318, 0.564776, 0.574174]},
    "v_measure": 0.561847, "v_measure_std": 0.010983,
    "main_score": 0.561847, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 19.704190969467163,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/StackExchangeClusteringP2P.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "815ca46b2622cec33ccafc3735d572c266efdb44",
  "task_name": "StackExchangeClusteringP2P.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.42374, 0.409094, 0.425081, 0.422455, 0.416774, 0.429298, 0.427029, 0.417227, 0.416926, 0.429391]},
    "v_measure": 0.421701, "v_measure_std": 0.006224,
    "main_score": 0.421701, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 251.8900909423828,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/SummEvalSummarization.v2.json
ADDED
@@ -0,0 +1,24 @@
{
  "dataset_revision": "cda12ad7615edc362dbf25a00fdd61d3b1eaf93c",
  "task_name": "SummEvalSummarization.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.321953, "spearman": 0.27845, "cosine_spearman": 0.27845, "cosine_pearson": 0.321953,
    "dot_spearman": 0.27845, "dot_pearson": 0.321953,
    "main_score": 0.27845, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 40.55940508842468,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/ToxicConversationsClassification.json
ADDED
@@ -0,0 +1,95 @@
{
  "dataset_revision": "edfaf9da55d3dd50d43143d90c1ac476895ae6de",
  "task_name": "ToxicConversationsClassification",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "accuracy": 0.649756, "f1": 0.500659, "f1_weighted": 0.726326, "ap": 0.119917, "ap_weighted": 0.119917,
    "scores_per_experiment": [
      {"accuracy": 0.70752, "f1": 0.531645, "f1_weighted": 0.773245, "ap": 0.123783, "ap_weighted": 0.123783},
      {"accuracy": 0.696777, "f1": 0.522619, "f1_weighted": 0.765343, "ap": 0.118554, "ap_weighted": 0.118554},
      {"accuracy": 0.750488, "f1": 0.562242, "f1_weighted": 0.803892, "ap": 0.1387, "ap_weighted": 0.1387},
      {"accuracy": 0.781738, "f1": 0.59681, "f1_weighted": 0.82667, "ap": 0.168484, "ap_weighted": 0.168484},
      {"accuracy": 0.491699, "f1": 0.409276, "f1_weighted": 0.595024, "ap": 0.100641, "ap_weighted": 0.100641},
      {"accuracy": 0.576172, "f1": 0.451631, "f1_weighted": 0.671619, "ap": 0.099871, "ap_weighted": 0.099871},
      {"accuracy": 0.728516, "f1": 0.534587, "f1_weighted": 0.787486, "ap": 0.116673, "ap_weighted": 0.116673},
      {"accuracy": 0.55127, "f1": 0.442383, "f1_weighted": 0.649809, "ap": 0.103346, "ap_weighted": 0.103346},
      {"accuracy": 0.562012, "f1": 0.452781, "f1_weighted": 0.658588, "ap": 0.110002, "ap_weighted": 0.110002},
      {"accuracy": 0.651367, "f1": 0.502612, "f1_weighted": 0.731589, "ap": 0.119118, "ap_weighted": 0.119118}
    ],
    "main_score": 0.649756, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 65.01656985282898,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/TweetSentimentExtractionClassification.json
ADDED
@@ -0,0 +1,73 @@
{
  "dataset_revision": "d604517c81ca91fe16a244d1248fc021f9ecee7a",
  "task_name": "TweetSentimentExtractionClassification",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "accuracy": 0.567968, "f1": 0.568612, "f1_weighted": 0.558348,
    "scores_per_experiment": [
      {"accuracy": 0.577816, "f1": 0.578666, "f1_weighted": 0.569619},
      {"accuracy": 0.558008, "f1": 0.559777, "f1_weighted": 0.549513},
      {"accuracy": 0.576967, "f1": 0.580279, "f1_weighted": 0.571986},
      {"accuracy": 0.583758, "f1": 0.586647, "f1_weighted": 0.578427},
      {"accuracy": 0.577816, "f1": 0.581046, "f1_weighted": 0.574979},
      {"accuracy": 0.599038, "f1": 0.59894, "f1_weighted": 0.587405},
      {"accuracy": 0.529145, "f1": 0.52143, "f1_weighted": 0.507077},
      {"accuracy": 0.581777, "f1": 0.583634, "f1_weighted": 0.573549},
      {"accuracy": 0.546689, "f1": 0.542978, "f1_weighted": 0.528275},
      {"accuracy": 0.54867, "f1": 0.552718, "f1_weighted": 0.542651}
    ],
    "main_score": 0.567968, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 49.486531019210815,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/TwentyNewsgroupsClustering.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "6125ec4e24fa026cec8a478383ee943acfbd5449",
  "task_name": "TwentyNewsgroupsClustering.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.501089, 0.484573, 0.504964, 0.46756, 0.464042, 0.455967, 0.49656, 0.479456, 0.452295, 0.515964]},
    "v_measure": 0.482247, "v_measure_std": 0.020883,
    "main_score": 0.482247, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 21.628643035888672,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/TwitterSemEval2015.json
ADDED
@@ -0,0 +1,58 @@
{
  "dataset_revision": "70970daeab8776df92f5ea462b6173c0b46fd2d1",
  "task_name": "TwitterSemEval2015",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "similarity_accuracy": 0.83841, "similarity_accuracy_threshold": 0.939993, "similarity_f1": 0.623035, "similarity_f1_threshold": 0.926825, "similarity_precision": 0.560811, "similarity_recall": 0.700792, "similarity_ap": 0.673961,
    "cosine_accuracy": 0.83841, "cosine_accuracy_threshold": 0.939993, "cosine_f1": 0.623035, "cosine_f1_threshold": 0.926825, "cosine_precision": 0.560811, "cosine_recall": 0.700792, "cosine_ap": 0.673961,
    "manhattan_accuracy": 0.838469, "manhattan_accuracy_threshold": 8.698231, "manhattan_f1": 0.622206, "manhattan_f1_threshold": 9.691168, "manhattan_precision": 0.571271, "manhattan_recall": 0.683113, "manhattan_ap": 0.67382,
    "euclidean_accuracy": 0.83841, "euclidean_accuracy_threshold": 0.34643, "euclidean_f1": 0.623035, "euclidean_f1_threshold": 0.382557, "euclidean_precision": 0.560811, "euclidean_recall": 0.700792, "euclidean_ap": 0.673961,
    "dot_accuracy": 0.83841, "dot_accuracy_threshold": 0.939993, "dot_f1": 0.623035, "dot_f1_threshold": 0.926825, "dot_precision": 0.560811, "dot_recall": 0.700792, "dot_ap": 0.673961,
    "max_accuracy": 0.838469, "max_f1": 0.623035, "max_precision": 0.571271, "max_recall": 0.700792, "max_ap": 0.673961,
    "main_score": 0.673961, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 127.13278985023499,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/TwitterURLCorpus.json
ADDED
@@ -0,0 +1,58 @@
{
  "dataset_revision": "8b6510b0b1fa4e4c4f879467980e9be563ec1cdf",
  "task_name": "TwitterURLCorpus",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "similarity_accuracy": 0.886619, "similarity_accuracy_threshold": 0.915378, "similarity_f1": 0.774674, "similarity_f1_threshold": 0.906933, "similarity_precision": 0.742966, "similarity_recall": 0.809209, "similarity_ap": 0.855549,
    "cosine_accuracy": 0.886619, "cosine_accuracy_threshold": 0.915378, "cosine_f1": 0.774674, "cosine_f1_threshold": 0.906933, "cosine_precision": 0.742966, "cosine_recall": 0.809209, "cosine_ap": 0.855549,
    "manhattan_accuracy": 0.88689, "manhattan_accuracy_threshold": 10.514816, "manhattan_f1": 0.774695, "manhattan_f1_threshold": 10.809466, "manhattan_precision": 0.763825, "manhattan_recall": 0.785879, "manhattan_ap": 0.855395,
    "euclidean_accuracy": 0.886619, "euclidean_accuracy_threshold": 0.411392, "euclidean_f1": 0.774674, "euclidean_f1_threshold": 0.431433, "euclidean_precision": 0.742966, "euclidean_recall": 0.809209, "euclidean_ap": 0.855549,
    "dot_accuracy": 0.886619, "dot_accuracy_threshold": 0.915378, "dot_f1": 0.774674, "dot_f1_threshold": 0.906933, "dot_precision": 0.742966, "dot_recall": 0.809209, "dot_ap": 0.855549,
    "max_accuracy": 0.88689, "max_f1": 0.774695, "max_precision": 0.763825, "max_recall": 0.809209, "max_ap": 0.855549,
    "main_score": 0.855549, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 418.784104347229,
  "kg_co2_emissions": null
}
results/voyageai__voyage-2/1/model_meta.json
ADDED
@@ -0,0 +1 @@
{"name": "voyageai/voyage-2", "revision": "1", "release_date": "2023-10-29", "languages": null, "n_parameters": null, "memory_usage_mb": null, "max_tokens": 4000.0, "embed_dim": 1024, "license": null, "open_weights": false, "public_training_code": null, "public_training_data": null, "framework": ["API"], "reference": "https://blog.voyageai.com/2023/10/29/voyage-embeddings/", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": {}, "adapted_from": null, "superseded_by": null, "is_cross_encoder": null, "modalities": ["text"], "loader": "VoyageWrapper"}
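model_meta.json is a one-line description of the API model (dimension, token limit, loader). A small sketch, standard library only, that reads the metadata and collects every task's main score for this model into a table; it assumes, as all files in this PR do, that results are reported on a "test" split:

import json
from pathlib import Path

model_dir = Path("results/voyageai__voyage-2/1")
meta = json.loads((model_dir / "model_meta.json").read_text())
print(meta["name"], meta["embed_dim"], meta["max_tokens"])  # voyageai/voyage-2 1024 4000.0

for result_file in sorted(model_dir.glob("*.json")):
    if result_file.name == "model_meta.json":
        continue
    data = json.loads(result_file.read_text())
    main_score = data["scores"]["test"][0]["main_score"]
    print(f"{data['task_name']:45s} {main_score:.4f}")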
results/voyageai__voyage-3-lite/1/STS22.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3",
  "task_name": "STS22",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.6695, "spearman": 0.669074, "cosine_pearson": 0.6695, "cosine_spearman": 0.669074,
    "manhattan_pearson": 0.685775, "manhattan_spearman": 0.674971, "euclidean_pearson": 0.682613, "euclidean_spearman": 0.669074,
    "main_score": 0.669074, "hf_subset": "en", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 9.082679986953735,
  "kg_co2_emissions": null
}
results/voyageai__voyage-3-lite/1/model_meta.json
CHANGED
@@ -1 +1 @@
-{"name": "voyageai/voyage-3-lite", "revision": "1", "release_date": "2024-09-18", "languages": null, "n_parameters": null, "memory_usage_mb": null, "max_tokens": 32000.0, "embed_dim": 512, "license": null, "open_weights": false, "public_training_code": null, "public_training_data": null, "framework": ["API"], "reference": "https://blog.voyageai.com/2024/09/18/voyage-3/", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": {}, "adapted_from": null, "superseded_by": null, "modalities": ["text"], "loader": "VoyageWrapper"}
+{"name": "voyageai/voyage-3-lite", "revision": "1", "release_date": "2024-09-18", "languages": null, "n_parameters": null, "memory_usage_mb": null, "max_tokens": 32000.0, "embed_dim": 512, "license": null, "open_weights": false, "public_training_code": null, "public_training_data": null, "framework": ["API"], "reference": "https://blog.voyageai.com/2024/09/18/voyage-3/", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": {}, "adapted_from": null, "superseded_by": null, "is_cross_encoder": null, "modalities": ["text"], "loader": "VoyageWrapper"}
results/voyageai__voyage-3-m-exp/1/ArXivHierarchicalClusteringP2P.json
ADDED
@@ -0,0 +1,46 @@
{
  "dataset_revision": "0bbdb47bcbe3a90093699aefeed338a0f28a7ee8",
  "task_name": "ArXivHierarchicalClusteringP2P",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {
      "Level 0": [0.504571, 0.506488, 0.560067, 0.601787, 0.61077, 0.594428, 0.567709, 0.579547, 0.549026, 0.566101],
      "Level 1": [0.566838, 0.547598, 0.539765, 0.553095, 0.545797, 0.562666, 0.565628, 0.539637, 0.561949, 0.569363]
    },
    "v_measure": 0.559641, "v_measure_std": 0.025929,
    "main_score": 0.559641, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 16.59666085243225,
  "kg_co2_emissions": null
}
results/voyageai__voyage-3-m-exp/1/ArXivHierarchicalClusteringS2S.json
ADDED
@@ -0,0 +1,46 @@
{
  "dataset_revision": "b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3",
  "task_name": "ArXivHierarchicalClusteringS2S",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {
      "Level 0": [0.607558, 0.640236, 0.622975, 0.641163, 0.643232, 0.594824, 0.588233, 0.598607, 0.583539, 0.614635],
      "Level 1": [0.556907, 0.591977, 0.577836, 0.583035, 0.555739, 0.563237, 0.561241, 0.607522, 0.558536, 0.582646]
    },
    "v_measure": 0.593684, "v_measure_std": 0.027579,
    "main_score": 0.593684, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 6.130875110626221,
  "kg_co2_emissions": null
}
results/voyageai__voyage-3-m-exp/1/BiorxivClusteringP2P.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "f5dbc242e11dd8e24def4c4268607a49e02946dc",
  "task_name": "BiorxivClusteringP2P.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.37603, 0.398675, 0.412999, 0.410248, 0.404275, 0.416576, 0.381482, 0.41971, 0.418469, 0.427124]},
    "v_measure": 0.406559, "v_measure_std": 0.015879,
    "main_score": 0.406559, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 318.43674492836,
  "kg_co2_emissions": null
}
results/voyageai__voyage-3-m-exp/1/MedrxivClusteringP2P.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "e7a26af6f3ae46b30dde8737f02c07b1505bcc73",
  "task_name": "MedrxivClusteringP2P.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.351055, 0.353192, 0.345473, 0.352441, 0.334031, 0.340297, 0.364124, 0.354706, 0.349397, 0.35705]},
    "v_measure": 0.350177, "v_measure_std": 0.008114,
    "main_score": 0.350177, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 241.27944016456604,
  "kg_co2_emissions": null
}
results/voyageai__voyage-3-m-exp/1/MedrxivClusteringS2S.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "35191c8c0dca72d8ff3efcd72aa802307d469663",
  "task_name": "MedrxivClusteringS2S.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.369853, 0.364777, 0.373058, 0.349856, 0.363737, 0.346005, 0.363008, 0.366087, 0.365885, 0.353479]},
    "v_measure": 0.361575, "v_measure_std": 0.008376,
    "main_score": 0.361575, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 30.982619762420654,
  "kg_co2_emissions": null
}
results/voyageai__voyage-3-m-exp/1/STS22.v2.json
ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "d31f33a128469b20e357535c39b82fb3c3f6f2bd",
  "task_name": "STS22.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "pearson": 0.68873, "spearman": 0.683449, "cosine_pearson": 0.68873, "cosine_spearman": 0.683449,
    "manhattan_pearson": 0.693013, "manhattan_spearman": 0.685879, "euclidean_pearson": 0.689661, "euclidean_spearman": 0.683449,
    "main_score": 0.683449, "hf_subset": "en", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 93.40096783638,
  "kg_co2_emissions": null
}
results/voyageai__voyage-3-m-exp/1/StackExchangeClustering.v2.json
ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "6cbc1f7b2bc0622f2e39d2c77fa502909748c259",
  "task_name": "StackExchangeClustering.v2",
  "mteb_version": "1.36.10",
  "scores": {"test": [{
    "v_measures": {"Level 0": [0.552209, 0.547985, 0.567363, 0.547015, 0.561287, 0.568893, 0.577644, 0.573732, 0.57007, 0.54691]},
    "v_measure": 0.561311, "v_measure_std": 0.011243,
    "main_score": 0.561311, "hf_subset": "default", "languages": ["eng-Latn"]
  }]},
  "evaluation_time": 39.560309410095215,
  "kg_co2_emissions": null
}
results/voyageai__voyage-3-m-exp/1/StackExchangeClusteringP2P.v2.json
ADDED
@@ -0,0 +1,34 @@
+{
+  "dataset_revision": "815ca46b2622cec33ccafc3735d572c266efdb44",
+  "task_name": "StackExchangeClusteringP2P.v2",
+  "mteb_version": "1.36.10",
+  "scores": {
+    "test": [
+      {
+        "v_measures": {
+          "Level 0": [
+            0.386565,
+            0.392911,
+            0.396824,
+            0.382425,
+            0.375146,
+            0.397625,
+            0.403074,
+            0.396519,
+            0.394397,
+            0.382379
+          ]
+        },
+        "v_measure": 0.390786,
+        "v_measure_std": 0.008292,
+        "main_score": 0.390786,
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ]
+      }
+    ]
+  },
+  "evaluation_time": 317.12974095344543,
+  "kg_co2_emissions": null
+}
results/voyageai__voyage-3-m-exp/1/SummEvalSummarization.v2.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "dataset_revision": "cda12ad7615edc362dbf25a00fdd61d3b1eaf93c",
+  "task_name": "SummEvalSummarization.v2",
+  "mteb_version": "1.36.10",
+  "scores": {
+    "test": [
+      {
+        "pearson": 0.328794,
+        "spearman": 0.277414,
+        "cosine_spearman": 0.277414,
+        "cosine_pearson": 0.328794,
+        "dot_spearman": 0.277414,
+        "dot_pearson": 0.328794,
+        "main_score": 0.277414,
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ]
+      }
+    ]
+  },
+  "evaluation_time": 92.6338050365448,
+  "kg_co2_emissions": null
+}
results/voyageai__voyage-3-m-exp/1/TwentyNewsgroupsClustering.v2.json
ADDED
@@ -0,0 +1,34 @@
+{
+  "dataset_revision": "6125ec4e24fa026cec8a478383ee943acfbd5449",
+  "task_name": "TwentyNewsgroupsClustering.v2",
+  "mteb_version": "1.36.10",
+  "scores": {
+    "test": [
+      {
+        "v_measures": {
+          "Level 0": [
+            0.451354,
+            0.409015,
+            0.458468,
+            0.463974,
+            0.501944,
+            0.432412,
+            0.477071,
+            0.43414,
+            0.4103,
+            0.452764
+          ]
+        },
+        "v_measure": 0.449144,
+        "v_measure_std": 0.027417,
+        "main_score": 0.449144,
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ]
+      }
+    ]
+  },
+  "evaluation_time": 37.32608604431152,
+  "kg_co2_emissions": null
+}
results/voyageai__voyage-3-m-exp/1/model_meta.json
CHANGED
@@ -1 +1 @@
-{"name": "voyageai/voyage-3-m-exp", "revision": "1", "release_date": "2025-01-08", "languages": ["eng-Latn"], "n_parameters":
+{"name": "voyageai/voyage-3-m-exp", "revision": "1", "release_date": "2025-01-08", "languages": ["eng-Latn"], "n_parameters": 6918000000, "memory_usage_mb": null, "max_tokens": 32000.0, "embed_dim": 2048, "license": null, "open_weights": false, "public_training_code": null, "public_training_data": null, "framework": ["API"], "reference": "https://huggingface.co/voyageai/voyage-3-m-exp", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": {"AmazonPolarityClassification": ["train"], "AmazonReviewsClassification": ["train"], "EmotionClassification": ["train"], "HotpotQA": ["train"], "ImdbClassification": ["train"], "MTOPDomainClassification": ["train"], "MTOPIntentClassification": ["train"], "MindSmallReranking": ["train"], "MassiveIntentClassification": ["train"], "MassiveScenarioClassification": ["train"], "MedrxivClusteringP2P": ["train"], "MedrxivClusteringS2S": ["train"], "STS12": ["train"], "STSBenchmark": ["train"], "StackOverflowDupQuestions": ["train"], "ToxicConversationsClassification": ["train"], "TweetSentimentExtractionClassification": ["train"], "BiorxivClusteringP2P": ["train"], "BiorxivClusteringS2S": ["train"], "Banking77Classification": ["train"], "ArguAna": ["train"], "ArguAna-PL": ["train"], "ArguAna-NL": ["train"], "NanoArguAnaRetrieval": ["train"], "STS22": ["train"], "AmazonCounterfactualClassification": ["train"], "ArxivClusteringP2P": ["train"], "ArxivClusteringS2S": ["train"], "NQ": ["train"], "SciFact": ["train"], "QuoraRetrieval": ["train"], "NanoQuoraRetrieval": ["train"], "NQHardNegatives": ["train"], "NanoNQRetrieval": ["train"], "NQ-PL": ["train"], "NQ-NL": ["train"], "NFCorpus": ["train"], "FEVERHardNegatives": ["train"], "NanoFEVERRetrieval": ["train"], "FEVER-NL": ["train"], "FiQA2018-NL": ["train"], "BiorxivClusteringP2P.v2": ["train"], "BiorxivClusteringS2S.v2": ["train"], "MedrxivClusteringP2P.v2": ["train"], "MedrxivClusteringS2S.v2": ["train"], "MSMARCO": ["train"], "MSMARCOHardNegatives": ["train"], "NanoMSMARCORetrieval": ["train"], "MSMARCO-PL": ["train"], "mMARCO-NL": ["train"], "HotpotQA-PL": ["train"], "HotpotQA-NL": ["train"], "HotpotQAHardNegatives": ["train"], "FEVER": ["train"], "FiQA2018": ["train"], "DBPedia": ["train"], "TRECCOVID": ["train"], "ArxivClusteringP2P.v2": ["train"], "STSBenchmarkMultilingualSTS": ["train"]}, "adapted_from": null, "superseded_by": null, "is_cross_encoder": null, "modalities": ["text"], "loader": "VoyageWrapper"}
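The updated model_meta.json sits next to the per-task result files, so a small summary of the voyage-3-m-exp scores added in this PR can be assembled straight from that folder. A sketch, assuming the directory layout shown above and that every result file here reports a single "test" split:

import json
from pathlib import Path

model_dir = Path("results/voyageai__voyage-3-m-exp/1")

meta = json.loads((model_dir / "model_meta.json").read_text())
print(meta["name"], meta["embed_dim"], meta["max_tokens"])

# Collect main_score per task from every result file in the folder.
for f in sorted(model_dir.glob("*.json")):
    if f.name == "model_meta.json":
        continue
    res = json.loads(f.read_text())
    score = res["scores"]["test"][0]["main_score"]
    print(f"{res['task_name']:40s} {score:.4f}")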
results/voyageai__voyage-3/1/STS22.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "dataset_revision": "de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3",
+  "task_name": "STS22",
+  "mteb_version": "1.36.10",
+  "scores": {
+    "test": [
+      {
+        "pearson": 0.704557,
+        "spearman": 0.696084,
+        "cosine_pearson": 0.704557,
+        "cosine_spearman": 0.696084,
+        "manhattan_pearson": 0.700972,
+        "manhattan_spearman": 0.696571,
+        "euclidean_pearson": 0.700989,
+        "euclidean_spearman": 0.696084,
+        "main_score": 0.696084,
+        "hf_subset": "en",
+        "languages": [
+          "eng-Latn"
+        ]
+      }
+    ]
+  },
+  "evaluation_time": 12.066434860229492,
+  "kg_co2_emissions": null
+}
results/voyageai__voyage-3/1/model_meta.json
CHANGED
@@ -1 +1 @@
-{"name": "voyageai/voyage-3", "revision": "1", "release_date": "2024-09-18", "languages": null, "n_parameters": null, "memory_usage_mb": null, "max_tokens": 32000.0, "embed_dim": 1024, "license": null, "open_weights": false, "public_training_code": null, "public_training_data": null, "framework": ["API"], "reference": "https://blog.voyageai.com/2024/09/18/voyage-3/", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": {}, "adapted_from": null, "superseded_by": null, "modalities": ["text"], "loader": "VoyageWrapper"}
+{"name": "voyageai/voyage-3", "revision": "1", "release_date": "2024-09-18", "languages": null, "n_parameters": null, "memory_usage_mb": null, "max_tokens": 32000.0, "embed_dim": 1024, "license": null, "open_weights": false, "public_training_code": null, "public_training_data": null, "framework": ["API"], "reference": "https://blog.voyageai.com/2024/09/18/voyage-3/", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": {}, "adapted_from": null, "superseded_by": null, "is_cross_encoder": null, "modalities": ["text"], "loader": "VoyageWrapper"}
results/voyageai__voyage-code-2/1/AmazonCounterfactualClassification.json
ADDED
@@ -0,0 +1,95 @@
+{
+  "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
+  "task_name": "AmazonCounterfactualClassification",
+  "mteb_version": "1.36.10",
+  "scores": {
+    "test": [
+      {
+        "accuracy": 0.677761,
+        "f1": 0.610175,
+        "f1_weighted": 0.70871,
+        "ap": 0.293898,
+        "ap_weighted": 0.293898,
+        "scores_per_experiment": [
+          {
+            "accuracy": 0.61194,
+            "f1": 0.543142,
+            "f1_weighted": 0.651102,
+            "ap": 0.237485,
+            "ap_weighted": 0.237485
+          },
+          {
+            "accuracy": 0.679104,
+            "f1": 0.621697,
+            "f1_weighted": 0.711438,
+            "ap": 0.309108,
+            "ap_weighted": 0.309108
+          },
+          {
+            "accuracy": 0.623881,
+            "f1": 0.571629,
+            "f1_weighted": 0.662735,
+            "ap": 0.270047,
+            "ap_weighted": 0.270047
+          },
+          {
+            "accuracy": 0.698507,
+            "f1": 0.627815,
+            "f1_weighted": 0.726591,
+            "ap": 0.303509,
+            "ap_weighted": 0.303509
+          },
+          {
+            "accuracy": 0.725373,
+            "f1": 0.659883,
+            "f1_weighted": 0.750767,
+            "ap": 0.339386,
+            "ap_weighted": 0.339386
+          },
+          {
+            "accuracy": 0.69403,
+            "f1": 0.630949,
+            "f1_weighted": 0.723862,
+            "ap": 0.312668,
+            "ap_weighted": 0.312668
+          },
+          {
+            "accuracy": 0.737313,
+            "f1": 0.662163,
+            "f1_weighted": 0.759193,
+            "ap": 0.333833,
+            "ap_weighted": 0.333833
+          },
+          {
+            "accuracy": 0.685075,
+            "f1": 0.620148,
+            "f1_weighted": 0.71578,
+            "ap": 0.300975,
+            "ap_weighted": 0.300975
+          },
+          {
+            "accuracy": 0.701493,
+            "f1": 0.633836,
+            "f1_weighted": 0.729683,
+            "ap": 0.311549,
+            "ap_weighted": 0.311549
+          },
+          {
+            "accuracy": 0.620896,
+            "f1": 0.530491,
+            "f1_weighted": 0.65595,
+            "ap": 0.220415,
+            "ap_weighted": 0.220415
+          }
+        ],
+        "main_score": 0.677761,
+        "hf_subset": "en",
+        "languages": [
+          "eng-Latn"
+        ]
+      }
+    ]
+  },
+  "evaluation_time": 22.859440088272095,
+  "kg_co2_emissions": null
+}
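For the classification results, the top-level "accuracy" (and the "main_score" of 0.677761 above) is the mean over the ten runs listed in "scores_per_experiment". A quick check, again assuming the path added in this PR:

import json
from statistics import mean

path = "results/voyageai__voyage-code-2/1/AmazonCounterfactualClassification.json"
with open(path) as f:
    entry = json.load(f)["scores"]["test"][0]

per_run = [run["accuracy"] for run in entry["scores_per_experiment"]]
# mean(per_run) comes out to 0.677761 for this file, matching "accuracy" and "main_score".
print(round(mean(per_run), 6), entry["accuracy"], entry["main_score"])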
results/voyageai__voyage-code-2/1/ArXivHierarchicalClusteringP2P.json
ADDED
@@ -0,0 +1,46 @@
+{
+  "dataset_revision": "0bbdb47bcbe3a90093699aefeed338a0f28a7ee8",
+  "task_name": "ArXivHierarchicalClusteringP2P",
+  "mteb_version": "1.36.10",
+  "scores": {
+    "test": [
+      {
+        "v_measures": {
+          "Level 0": [
+            0.527309,
+            0.57195,
+            0.611848,
+            0.577571,
+            0.632086,
+            0.594536,
+            0.602462,
+            0.552144,
+            0.641989,
+            0.616787
+          ],
+          "Level 1": [
+            0.589558,
+            0.578242,
+            0.590679,
+            0.572794,
+            0.542541,
+            0.590643,
+            0.548528,
+            0.581755,
+            0.564301,
+            0.586689
+          ]
+        },
+        "v_measure": 0.583721,
+        "v_measure_std": 0.028326,
+        "main_score": 0.583721,
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ]
+      }
+    ]
+  },
+  "evaluation_time": 23.01714301109314,
+  "kg_co2_emissions": null
+}
results/voyageai__voyage-code-2/1/ArXivHierarchicalClusteringS2S.json
ADDED
@@ -0,0 +1,46 @@
+{
+  "dataset_revision": "b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3",
+  "task_name": "ArXivHierarchicalClusteringS2S",
+  "mteb_version": "1.36.10",
+  "scores": {
+    "test": [
+      {
+        "v_measures": {
+          "Level 0": [
+            0.595005,
+            0.592639,
+            0.571063,
+            0.555542,
+            0.635342,
+            0.619466,
+            0.639564,
+            0.632908,
+            0.601079,
+            0.595422
+          ],
+          "Level 1": [
+            0.573586,
+            0.568001,
+            0.547275,
+            0.547429,
+            0.552073,
+            0.580846,
+            0.550669,
+            0.565668,
+            0.542304,
+            0.566985
+          ]
+        },
+        "v_measure": 0.581643,
+        "v_measure_std": 0.030341,
+        "main_score": 0.581643,
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ]
+      }
+    ]
+  },
+  "evaluation_time": 21.45569086074829,
+  "kg_co2_emissions": null
+}
results/voyageai__voyage-code-2/1/BIOSSES.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "dataset_revision": "d3fb88f8f02e40887cd149695127462bbcf29b4a",
+  "task_name": "BIOSSES",
+  "mteb_version": "1.36.10",
+  "scores": {
+    "test": [
+      {
+        "pearson": 0.899802,
+        "spearman": 0.891334,
+        "cosine_pearson": 0.899802,
+        "cosine_spearman": 0.891334,
+        "manhattan_pearson": 0.891701,
+        "manhattan_spearman": 0.891111,
+        "euclidean_pearson": 0.894141,
+        "euclidean_spearman": 0.891334,
+        "main_score": 0.891334,
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ]
+      }
+    ]
+  },
+  "evaluation_time": 2.5169851779937744,
+  "kg_co2_emissions": null
+}