Muennighoff committed on
Commit
e1c3953
1 Parent(s): dc534a5
Files changed (34) hide show
  1. paths.json +0 -0
  2. results/bge-base-zh/MmarcoReranking.json +1 -1
  3. results/bge-large-zh-noinstruct/MmarcoReranking.json +1 -1
  4. results/bge-large-zh/MmarcoReranking.json +1 -1
  5. results/bge-small-zh/MmarcoReranking.json +1 -1
  6. results/luotuo-bert-medium/MmarcoReranking.json +1 -1
  7. results/m3e-base/MmarcoReranking.json +1 -1
  8. results/m3e-large/MmarcoReranking.json +1 -1
  9. results/multilingual-e5-base/AFQMC.json +20 -0
  10. results/multilingual-e5-base/ATEC.json +20 -0
  11. results/multilingual-e5-base/BQ.json +20 -0
  12. results/multilingual-e5-base/CLSClusteringP2P.json +10 -0
  13. results/multilingual-e5-base/CLSClusteringS2S.json +10 -0
  14. results/multilingual-e5-base/CMedQAv1.json +10 -0
  15. results/multilingual-e5-base/CMedQAv2.json +10 -0
  16. results/multilingual-e5-base/Cmnli.json +49 -0
  17. results/multilingual-e5-base/IFlyTek.json +13 -0
  18. results/multilingual-e5-base/JDReview.json +15 -0
  19. results/multilingual-e5-base/LCQMC.json +20 -0
  20. results/multilingual-e5-base/MMarcoReranking.json +10 -0
  21. results/multilingual-e5-base/MultilingualSentiment.json +13 -0
  22. results/multilingual-e5-base/Ocnli.json +49 -0
  23. results/multilingual-e5-base/OnlineShopping.json +15 -0
  24. results/multilingual-e5-base/PAWSX.json +20 -0
  25. results/multilingual-e5-base/QBQTC.json +20 -0
  26. results/multilingual-e5-base/STS22.json +22 -0
  27. results/multilingual-e5-base/STSB.json +20 -0
  28. results/multilingual-e5-base/T2Reranking.json +10 -0
  29. results/multilingual-e5-base/TNews.json +13 -0
  30. results/multilingual-e5-base/ThuNewsClusteringP2P.json +10 -0
  31. results/multilingual-e5-base/ThuNewsClusteringS2S.json +10 -0
  32. results/multilingual-e5-base/Waimai.json +15 -0
  33. results/text-embedding-ada-002/merge_cqadupstack.py +0 -66
  34. results/text2vec-large-chinese/MmarcoReranking.json +1 -1
paths.json CHANGED
The diff for this file is too large to render. See raw diff
 
results/bge-base-zh/MmarcoReranking.json CHANGED
@@ -5,6 +5,6 @@
5
  "map": 0.28242481419180376,
6
  "mrr": 0.2667063492063492
7
  },
8
- "mteb_dataset_name": "MmarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
 
5
  "map": 0.28242481419180376,
6
  "mrr": 0.2667063492063492
7
  },
8
+ "mteb_dataset_name": "MMarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
results/bge-large-zh-noinstruct/MmarcoReranking.json CHANGED
@@ -5,6 +5,6 @@
5
  "map": 0.27102013701446426,
6
  "mrr": 0.26324206349206347
7
  },
8
- "mteb_dataset_name": "MmarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
 
5
  "map": 0.27102013701446426,
6
  "mrr": 0.26324206349206347
7
  },
8
+ "mteb_dataset_name": "MMarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
results/bge-large-zh/MmarcoReranking.json CHANGED
@@ -5,6 +5,6 @@
5
  "map": 0.26232690178149815,
6
  "mrr": 0.25287301587301586
7
  },
8
- "mteb_dataset_name": "MmarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
 
5
  "map": 0.26232690178149815,
6
  "mrr": 0.25287301587301586
7
  },
8
+ "mteb_dataset_name": "MMarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
results/bge-small-zh/MmarcoReranking.json CHANGED
@@ -5,6 +5,6 @@
5
  "map": 0.2282096733248694,
6
  "mrr": 0.21677380952380954
7
  },
8
- "mteb_dataset_name": "MmarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
 
5
  "map": 0.2282096733248694,
6
  "mrr": 0.21677380952380954
7
  },
8
+ "mteb_dataset_name": "MMarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
results/luotuo-bert-medium/MmarcoReranking.json CHANGED
@@ -5,6 +5,6 @@
5
  "map": 0.1454654108166253,
6
  "mrr": 0.12848412698412698
7
  },
8
- "mteb_dataset_name": "MmarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
 
5
  "map": 0.1454654108166253,
6
  "mrr": 0.12848412698412698
7
  },
8
+ "mteb_dataset_name": "MMarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
results/m3e-base/MmarcoReranking.json CHANGED
@@ -5,6 +5,6 @@
5
  "map": 0.17509891535107402,
6
  "mrr": 0.16049603174603175
7
  },
8
- "mteb_dataset_name": "MmarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
 
5
  "map": 0.17509891535107402,
6
  "mrr": 0.16049603174603175
7
  },
8
+ "mteb_dataset_name": "MMarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
results/m3e-large/MmarcoReranking.json CHANGED
@@ -5,6 +5,6 @@
5
  "map": 0.16458504717332473,
6
  "mrr": 0.14978968253968256
7
  },
8
- "mteb_dataset_name": "MmarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
 
5
  "map": 0.16458504717332473,
6
  "mrr": 0.14978968253968256
7
  },
8
+ "mteb_dataset_name": "MMarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
results/multilingual-e5-base/AFQMC.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "AFQMC",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "cos_sim": {
7
+ "pearson": NaN,
8
+ "spearman": NaN
9
+ },
10
+ "euclidean": {
11
+ "pearson": NaN,
12
+ "spearman": NaN
13
+ },
14
+ "evaluation_time": 4.59,
15
+ "manhattan": {
16
+ "pearson": NaN,
17
+ "spearman": NaN
18
+ }
19
+ }
20
+ }
results/multilingual-e5-base/ATEC.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "ATEC",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "cos_sim": {
7
+ "pearson": 0.373550586257545,
8
+ "spearman": 0.3701257946883062
9
+ },
10
+ "euclidean": {
11
+ "pearson": 0.39803864674686384,
12
+ "spearman": 0.3696591560768722
13
+ },
14
+ "evaluation_time": 23.06,
15
+ "manhattan": {
16
+ "pearson": 0.39785415725026396,
17
+ "spearman": 0.3698413102943949
18
+ }
19
+ }
20
+ }
results/multilingual-e5-base/BQ.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "BQ",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "cos_sim": {
7
+ "pearson": 0.45303599781483117,
8
+ "spearman": 0.45448247854254875
9
+ },
10
+ "euclidean": {
11
+ "pearson": 0.4522686095980617,
12
+ "spearman": 0.4542176825905819
13
+ },
14
+ "evaluation_time": 12.41,
15
+ "manhattan": {
16
+ "pearson": 0.4532868838598347,
17
+ "spearman": 0.4551567659021363
18
+ }
19
+ }
20
+ }
results/multilingual-e5-base/CLSClusteringP2P.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "CLSClusteringP2P",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "evaluation_time": 994.39,
7
+ "v_measure": 0.3240836272693557,
8
+ "v_measure_std": 0.014062578366647911
9
+ }
10
+ }
results/multilingual-e5-base/CLSClusteringS2S.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "CLSClusteringS2S",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "evaluation_time": 68.17,
7
+ "v_measure": 0.36989955642239736,
8
+ "v_measure_std": 0.014148037653818231
9
+ }
10
+ }
results/multilingual-e5-base/CMedQAv1.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "CMedQAv1",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "evaluation_time": 359.86,
7
+ "map": 0.6520611444006958,
8
+ "mrr": 0.7082547619047619
9
+ }
10
+ }
results/multilingual-e5-base/CMedQAv2.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "CMedQAv2",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "evaluation_time": 345.36,
7
+ "map": 0.660602348976891,
8
+ "mrr": 0.7095924603174604
9
+ }
10
+ }
results/multilingual-e5-base/Cmnli.json ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "Cmnli",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "validation": {
6
+ "cos_sim": {
7
+ "accuracy": 0.67083583884546,
8
+ "accuracy_threshold": 0.9160473346710205,
9
+ "ap": 0.7451262220072653,
10
+ "f1": 0.7099406528189912,
11
+ "f1_threshold": 0.8881622552871704,
12
+ "precision": 0.5882895343476257,
13
+ "recall": 0.895019873743278
14
+ },
15
+ "dot": {
16
+ "accuracy": 0.5526157546602526,
17
+ "accuracy_threshold": 260.6299743652344,
18
+ "ap": 0.5833343142596583,
19
+ "f1": 0.6792153125248194,
20
+ "f1_threshold": 170.95211791992188,
21
+ "precision": 0.5143132066394034,
22
+ "recall": 0.999766191255553
23
+ },
24
+ "euclidean": {
25
+ "accuracy": 0.6778111846061335,
26
+ "accuracy_threshold": 7.05479097366333,
27
+ "ap": 0.7498396139781417,
28
+ "f1": 0.7167630057803467,
29
+ "f1_threshold": 7.7890119552612305,
30
+ "precision": 0.6095362936260855,
31
+ "recall": 0.8697685293429974
32
+ },
33
+ "evaluation_time": 13.62,
34
+ "manhattan": {
35
+ "accuracy": 0.6754058929645219,
36
+ "accuracy_threshold": 155.12338256835938,
37
+ "ap": 0.7486919271065671,
38
+ "f1": 0.7158293838862558,
39
+ "f1_threshold": 173.24166870117188,
40
+ "precision": 0.6019448429778416,
41
+ "recall": 0.8828618190320318
42
+ },
43
+ "max": {
44
+ "accuracy": 0.6778111846061335,
45
+ "ap": 0.7498396139781417,
46
+ "f1": 0.7167630057803467
47
+ }
48
+ }
49
+ }
results/multilingual-e5-base/IFlyTek.json ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "IFlyTek",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "validation": {
6
+ "accuracy": 0.4493266641015775,
7
+ "accuracy_stderr": 0.003512954893578556,
8
+ "evaluation_time": 254.82,
9
+ "f1": 0.36853572831286635,
10
+ "f1_stderr": 0.004218344207296586,
11
+ "main_score": 0.4493266641015775
12
+ }
13
+ }
results/multilingual-e5-base/JDReview.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "JDReview",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "accuracy": 0.7621013133208254,
7
+ "accuracy_stderr": 0.020668576891437772,
8
+ "ap": 0.3800848445284182,
9
+ "ap_stderr": 0.02019610804976287,
10
+ "evaluation_time": 15.17,
11
+ "f1": 0.6972380312259057,
12
+ "f1_stderr": 0.01793508902745797,
13
+ "main_score": 0.7621013133208254
14
+ }
15
+ }
results/multilingual-e5-base/LCQMC.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "LCQMC",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "cos_sim": {
7
+ "pearson": 0.6814095532091424,
8
+ "spearman": 0.741480433718059
9
+ },
10
+ "euclidean": {
11
+ "pearson": 0.7294175071690869,
12
+ "spearman": 0.7422597401119065
13
+ },
14
+ "evaluation_time": 9.45,
15
+ "manhattan": {
16
+ "pearson": 0.7288460318935632,
17
+ "spearman": 0.7414605912619334
18
+ }
19
+ }
20
+ }
results/multilingual-e5-base/MMarcoReranking.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "dev": {
4
+ "evaluation_time": 367.86,
5
+ "map": 0.21758599602003656,
6
+ "mrr": 0.20473412698412694
7
+ },
8
+ "mteb_dataset_name": "MMarcoReranking",
9
+ "mteb_version": "1.1.1.dev0"
10
+ }
results/multilingual-e5-base/MultilingualSentiment.json ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "MultilingualSentiment",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "validation": {
6
+ "accuracy": 0.6528,
7
+ "accuracy_stderr": 0.015590737991227706,
8
+ "evaluation_time": 23.58,
9
+ "f1": 0.6522681687065506,
10
+ "f1_stderr": 0.016840153401201912,
11
+ "main_score": 0.6528
12
+ }
13
+ }
results/multilingual-e5-base/Ocnli.json ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "Ocnli",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "validation": {
6
+ "cos_sim": {
7
+ "accuracy": 0.5804006497022198,
8
+ "accuracy_threshold": 0.9106633067131042,
9
+ "ap": 0.5962534282768569,
10
+ "f1": 0.6782231852654387,
11
+ "f1_threshold": 0.8347713947296143,
12
+ "precision": 0.5153677277716795,
13
+ "recall": 0.9915522703273495
14
+ },
15
+ "dot": {
16
+ "accuracy": 0.5154304277206281,
17
+ "accuracy_threshold": 220.6337127685547,
18
+ "ap": 0.5164804174436244,
19
+ "f1": 0.6783667621776504,
20
+ "f1_threshold": 208.66842651367188,
21
+ "precision": 0.5132791327913279,
22
+ "recall": 1.0
23
+ },
24
+ "euclidean": {
25
+ "accuracy": 0.5885219274499188,
26
+ "accuracy_threshold": 7.201763153076172,
27
+ "ap": 0.6039135198006501,
28
+ "f1": 0.6795058139534884,
29
+ "f1_threshold": 9.642261505126953,
30
+ "precision": 0.518005540166205,
31
+ "recall": 0.9873284054910243
32
+ },
33
+ "evaluation_time": 2.96,
34
+ "manhattan": {
35
+ "accuracy": 0.5912290200324851,
36
+ "accuracy_threshold": 158.3519287109375,
37
+ "ap": 0.6046932716509501,
38
+ "f1": 0.6792316056542226,
39
+ "f1_threshold": 214.6934814453125,
40
+ "precision": 0.5171081677704195,
41
+ "recall": 0.989440337909187
42
+ },
43
+ "max": {
44
+ "accuracy": 0.5912290200324851,
45
+ "ap": 0.6046932716509501,
46
+ "f1": 0.6795058139534884
47
+ }
48
+ }
49
+ }
results/multilingual-e5-base/OnlineShopping.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "OnlineShopping",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "accuracy": 0.884,
7
+ "accuracy_stderr": 0.021004761364985816,
8
+ "ap": 0.8588925493540545,
9
+ "ap_stderr": 0.023481667516592298,
10
+ "evaluation_time": 13.69,
11
+ "f1": 0.8839123392161612,
12
+ "f1_stderr": 0.020984685258322238,
13
+ "main_score": 0.884
14
+ }
15
+ }
results/multilingual-e5-base/PAWSX.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "PAWSX",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "cos_sim": {
7
+ "pearson": 0.12180372023646169,
8
+ "spearman": 0.12139273779665764
9
+ },
10
+ "euclidean": {
11
+ "pearson": 0.13834888637912623,
12
+ "spearman": 0.1135323752139316
13
+ },
14
+ "evaluation_time": 4.54,
15
+ "manhattan": {
16
+ "pearson": 0.13870252363945929,
17
+ "spearman": 0.11384069303176406
18
+ }
19
+ }
20
+ }
results/multilingual-e5-base/QBQTC.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "QBQTC",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "cos_sim": {
7
+ "pearson": 0.26523520497461966,
8
+ "spearman": 0.2881374545462317
9
+ },
10
+ "euclidean": {
11
+ "pearson": 0.27024025636798843,
12
+ "spearman": 0.28985097167369306
13
+ },
14
+ "evaluation_time": 7.8,
15
+ "manhattan": {
16
+ "pearson": 0.2720676214772048,
17
+ "spearman": 0.29194526688581
18
+ }
19
+ }
20
+ }
results/multilingual-e5-base/STS22.json ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": "6d1ba47164174a496b7fa5d3569dae26a6813b80",
3
+ "mteb_dataset_name": "STS22",
4
+ "mteb_version": "1.1.0",
5
+ "test": {
6
+ "evaluation_time": 15.78,
7
+ "zh": {
8
+ "cos_sim": {
9
+ "pearson": 0.580593994248265,
10
+ "spearman": 0.6564042377559131
11
+ },
12
+ "euclidean": {
13
+ "pearson": 0.4745454874584024,
14
+ "spearman": 0.6263254821825651
15
+ },
16
+ "manhattan": {
17
+ "pearson": 0.47547561695110807,
18
+ "spearman": 0.6245144767699952
19
+ }
20
+ }
21
+ }
22
+ }
results/multilingual-e5-base/STSB.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "STSB",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "cos_sim": {
7
+ "pearson": 0.7923536624786671,
8
+ "spearman": 0.7904542774401909
9
+ },
10
+ "euclidean": {
11
+ "pearson": 0.7848397831159944,
12
+ "spearman": 0.7878225614423001
13
+ },
14
+ "evaluation_time": 2.11,
15
+ "manhattan": {
16
+ "pearson": 0.7831002065895636,
17
+ "spearman": 0.7857187450963864
18
+ }
19
+ }
20
+ }
results/multilingual-e5-base/T2Reranking.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "dev": {
4
+ "evaluation_time": 1139.17,
5
+ "map": 0.643866753565647,
6
+ "mrr": 0.725825421005685
7
+ },
8
+ "mteb_dataset_name": "T2Reranking",
9
+ "mteb_version": "1.1.1.dev0"
10
+ }
results/multilingual-e5-base/TNews.json ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "TNews",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "validation": {
6
+ "accuracy": 0.47056000000000003,
7
+ "accuracy_stderr": 0.008537470351339442,
8
+ "evaluation_time": 30.17,
9
+ "f1": 0.45538348928071193,
10
+ "f1_stderr": 0.008124697989629562,
11
+ "main_score": 0.47056000000000003
12
+ }
13
+ }
results/multilingual-e5-base/ThuNewsClusteringP2P.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "ThuNewsClusteringP2P",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "evaluation_time": 445.69,
7
+ "v_measure": 0.40978660903187053,
8
+ "v_measure_std": 0.020457578952846982
9
+ }
10
+ }
results/multilingual-e5-base/ThuNewsClusteringS2S.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "ThuNewsClusteringS2S",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "evaluation_time": 59.43,
7
+ "v_measure": 0.5236140107043674,
8
+ "v_measure_std": 0.020888120540309855
9
+ }
10
+ }
results/multilingual-e5-base/Waimai.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_revision": null,
3
+ "mteb_dataset_name": "Waimai",
4
+ "mteb_version": "1.1.1.dev0",
5
+ "test": {
6
+ "accuracy": 0.8442000000000001,
7
+ "accuracy_stderr": 0.013075167302944933,
8
+ "ap": 0.6580622339164859,
9
+ "ap_stderr": 0.021630020878655898,
10
+ "evaluation_time": 6.05,
11
+ "f1": 0.8243508593454243,
12
+ "f1_stderr": 0.012334290325224843,
13
+ "main_score": 0.8442000000000001
14
+ }
15
+ }
results/text-embedding-ada-002/merge_cqadupstack.py DELETED
@@ -1,66 +0,0 @@
1
- """
2
- Merges CQADupstack subset results
3
- Usage: python merge_cqadupstack.py path_to_results_folder
4
- """
5
- import glob
6
- import json
7
- import logging
8
- import os
9
- import sys
10
-
11
- logging.basicConfig(level=logging.INFO)
12
- logger = logging.getLogger(__name__)
13
-
14
- TASK_LIST_CQA = [
15
- "CQADupstackAndroidRetrieval",
16
- "CQADupstackEnglishRetrieval",
17
- "CQADupstackGamingRetrieval",
18
- "CQADupstackGisRetrieval",
19
- "CQADupstackMathematicaRetrieval",
20
- "CQADupstackPhysicsRetrieval",
21
- "CQADupstackProgrammersRetrieval",
22
- "CQADupstackStatsRetrieval",
23
- "CQADupstackTexRetrieval",
24
- "CQADupstackUnixRetrieval",
25
- "CQADupstackWebmastersRetrieval",
26
- "CQADupstackWordpressRetrieval",
27
- ]
28
-
29
- NOAVG_KEYS = [
30
- "evaluation_time",
31
- "mteb_version",
32
- "mteb_dataset_name",
33
- "dataset_revision",
34
- ]
35
-
36
-
37
- results_folder = sys.argv[1]
38
- # Ensure at least 1 character btw CQADupstack & Retrieval
39
- files = glob.glob(f'{results_folder.strip("/")}/CQADupstack*?*Retrieval.json')
40
-
41
- logger.info(f"Found CQADupstack files: {files}")
42
-
43
- if len(files) == len(TASK_LIST_CQA):
44
- all_results = {}
45
- for file_name in files:
46
- with open(file_name, 'r', encoding='utf-8') as f:
47
- results = json.load(f)
48
- for split, split_results in results.items():
49
- if split not in ("train", "validation", "dev", "test"):
50
- all_results[split] = split_results
51
- continue
52
- all_results.setdefault(split, {})
53
- for metric, score in split_results.items():
54
- all_results[split].setdefault(metric, 0)
55
- if metric == "evaluation_time":
56
- score = all_results[split][metric] + score
57
- elif metric not in NOAVG_KEYS:
58
- score = all_results[split][metric] + score * 1/len(TASK_LIST_CQA)
59
- all_results[split][metric] = score
60
- all_results["mteb_dataset_name"] = "CQADupstackRetrieval"
61
-
62
- logger.info("Saving ", all_results)
63
- with open(os.path.join(results_folder, "CQADupstackRetrieval.json"), 'w', encoding='utf-8') as f:
64
- json.dump(all_results, f, indent=4)
65
- else:
66
- logger.warning(f"Got {len(files)}, but expected {len(TASK_LIST_CQA)} files. Missing: {set(TASK_LIST_CQA) - set([x.split('/')[-1].split('.')[0] for x in files])}; Too much: {set([x.split('/')[-1].split('.')[0] for x in files]) - set(TASK_LIST_CQA)}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
results/text2vec-large-chinese/MmarcoReranking.json CHANGED
@@ -5,6 +5,6 @@
5
  "map": 0.12481971498981873,
6
  "mrr": 0.11227777777777778
7
  },
8
- "mteb_dataset_name": "MmarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }
 
5
  "map": 0.12481971498981873,
6
  "mrr": 0.11227777777777778
7
  },
8
+ "mteb_dataset_name": "MMarcoReranking",
9
  "mteb_version": "1.0.2"
10
  }