Muennighoff and imenelydiaker committed on
Commit 6cd477a
1 Parent(s): dd4e753

Add missing results for French (#31)


- Add missing results (5e2004d3b0a4c0b39b4a955c4f0df21fffb7e0c4)


Co-authored-by: Imene Kerboua <imenelydiaker@users.noreply.huggingface.co>

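These files follow the output format of the mteb evaluation harness (the sentence-t5-xxl entries record "mteb_version": "0.0.2"). As a rough, non-authoritative sketch of how the missing French splits could be regenerated with that harness, assuming the 0.0.x-era MTEB(tasks=..., task_langs=...) API, sentence-transformers, and an output folder chosen to match this repository's layout:

# Sketch only (not part of this commit): regenerating French results with the mteb harness.
# Assumes the 0.0.x-era mteb API and sentence-transformers; the output folder is illustrative.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model_name = "paraphrase-multilingual-MiniLM-L12-v2"
model = SentenceTransformer(model_name)

# Limit the run to the French split of the tasks touched for this model.
evaluation = MTEB(
    tasks=["MassiveIntentClassification", "MassiveScenarioClassification", "STS22"],
    task_langs=["fr"],
)

# Writes one JSON file per task under results/<model_name>/, containing the
# per-language accuracy / f1 / main_score (or pearson / spearman) fields added below.
evaluation.run(model, output_folder=f"results/{model_name}")
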
results/paraphrase-multilingual-MiniLM-L12-v2/MassiveIntentClassification.json CHANGED
@@ -10,6 +10,13 @@
       "f1": 0.5719052969420737,
       "f1_stderr": 0.010658104148977046,
       "main_score": 0.5943174176193678
+    },
+    "fr": {
+      "accuracy": 0.5751513113651648,
+      "accuracy_stderr": 0.019739330808084554,
+      "f1": 0.5523046479534983,
+      "f1_stderr": 0.019565605090496704,
+      "main_score": 0.5751513113651648
     }
   }
 }
results/paraphrase-multilingual-MiniLM-L12-v2/MassiveScenarioClassification.json CHANGED
@@ -10,6 +10,13 @@
       "f1": 0.6457997326589723,
       "f1_stderr": 0.019460538209521996,
       "main_score": 0.6504371217215871
+    },
+    "fr": {
+      "accuracy": 0.6451916610625421,
+      "accuracy_stderr": 0.017556369249253807,
+      "f1": 0.6397670650302107,
+      "f1_stderr": 0.018203007308436978,
+      "main_score": 0.6451916610625421
     }
   }
 }
results/paraphrase-multilingual-MiniLM-L12-v2/STS22.json CHANGED
@@ -17,6 +17,20 @@
         "pearson": 0.18802290623288662,
         "spearman": 0.32720834567221585
       }
+    },
+    "fr": {
+      "cos_sim": {
+        "pearson": 0.6876645443753925,
+        "spearman": 0.7054517669493489
+      },
+      "euclidean": {
+        "pearson": 0.6887349071501192,
+        "spearman": 0.7054517669493489
+      },
+      "manhattan": {
+        "pearson": 0.6878125039694886,
+        "spearman": 0.7134927441731664
+      }
     }
   }
 }
results/paraphrase-multilingual-mpnet-base-v2/MassiveIntentClassification.json CHANGED
@@ -10,6 +10,13 @@
       "f1": 0.616942376654025,
       "f1_stderr": 0.01154911180505358,
       "main_score": 0.6429051782111633
+    },
+    "fr": {
+      "accuracy": 0.6187962340282447,
+      "accuracy_stderr": 0.015346336375965294,
+      "f1": 0.5930980931527273,
+      "f1_stderr": 0.013951896111200538,
+      "main_score": 0.6187962340282447
     }
   }
 }
results/paraphrase-multilingual-mpnet-base-v2/MassiveScenarioClassification.json CHANGED
@@ -10,6 +10,13 @@
       "f1": 0.6868067367060983,
       "f1_stderr": 0.012743491596311203,
       "main_score": 0.6898453261600539
+    },
+    "fr": {
+      "accuracy": 0.678950907868191,
+      "accuracy_stderr": 0.013931200089692352,
+      "f1": 0.6799784896999377,
+      "f1_stderr": 0.013507015697804235,
+      "main_score": 0.678950907868191
     }
   }
 }
results/paraphrase-multilingual-mpnet-base-v2/STS22.json CHANGED
@@ -17,6 +17,20 @@
         "pearson": 0.17487153889423696,
         "spearman": 0.29385036721704194
       }
+    },
+    "fr": {
+      "cos_sim": {
+        "pearson": 0.7218110510521758,
+        "spearman": 0.7430483195474878
+      },
+      "euclidean": {
+        "pearson": 0.713506902559743,
+        "spearman": 0.7430483195474878
+      },
+      "manhattan": {
+        "pearson": 0.7163030517164597,
+        "spearman": 0.7472570955048577
+      }
     }
   }
 }
results/sentence-t5-xxl/AmazonReviewsClassification.json CHANGED
@@ -6,6 +6,13 @@
       "f1": 0.4548832592405988,
       "f1_stderr": 0.019829148494805587,
       "main_score": 0.48926
+    },
+    "fr": {
+      "accuracy": 0.46088000000000007,
+      "accuracy_stderr": 0.017211902858196702,
+      "f1": 0.4264185814244663,
+      "f1_stderr": 0.02488062615048215,
+      "main_score": 0.46088000000000007
     },
     "evaluation_time": 1896.37
   },
results/sentence-t5-xxl/MTOPDomainClassification.json CHANGED
@@ -7,6 +7,13 @@
       "f1_stderr": 0.013092896503399553,
       "main_score": 0.9249430004559963
     },
+    "fr": {
+      "accuracy": 0.8620106482931412,
+      "accuracy_stderr": 0.008921323498003793,
+      "f1": 0.8611709558129327,
+      "f1_stderr": 0.00851141668747865,
+      "main_score": 0.8620106482931412
+    },
     "evaluation_time": 416.07
   },
   "mteb_version": "0.0.2",
results/sentence-t5-xxl/MTOPIntentClassification.json CHANGED
@@ -7,6 +7,13 @@
       "f1_stderr": 0.013081258889285574,
       "main_score": 0.683264933880529
     },
+    "fr": {
+      "accuracy": 0.5833385530848731,
+      "accuracy_stderr": 0.0227305071142759,
+      "f1": 0.42271947131722537,
+      "f1_stderr": 0.013817267193851573,
+      "main_score": 0.5833385530848731
+    },
     "evaluation_time": 479.68
   },
   "mteb_version": "0.0.2",
results/sentence-t5-xxl/MassiveIntentClassification.json CHANGED
@@ -7,6 +7,13 @@
       "f1_stderr": 0.007824228028244548,
       "main_score": 0.7344317417619368
     },
+    "fr": {
+      "accuracy": 0.6590786819098857,
+      "accuracy_stderr": 0.013280243031299545,
+      "f1": 0.6238270977617771,
+      "f1_stderr": 0.013051814817156253,
+      "main_score": 0.6590786819098857
+    },
     "evaluation_time": 350.86
   },
   "mteb_version": "0.0.2",
results/sentence-t5-xxl/MassiveScenarioClassification.json CHANGED
@@ -7,6 +7,13 @@
       "f1_stderr": 0.012690634519489478,
       "main_score": 0.7481842636180229
     },
+    "fr": {
+      "accuracy": 0.6853059852051111,
+      "accuracy_stderr": 0.015418661016383327,
+      "f1": 0.6812572021569442,
+      "f1_stderr": 0.014189310611148103,
+      "main_score": 0.6853059852051111
+    },
     "evaluation_time": 320.61
   },
   "mteb_version": "0.0.2",
results/sentence-t5-xxl/STS22.json CHANGED
@@ -14,6 +14,20 @@
         "spearman": 0.6586474519033438
       }
     },
+    "fr": {
+      "cos_sim": {
+        "pearson": 0.750938132766144,
+        "spearman": 0.7680240821366182
+      },
+      "euclidean": {
+        "pearson": 0.7473171169159765,
+        "spearman": 0.7680240821366182
+      },
+      "manhattan": {
+        "pearson": 0.7473231480609939,
+        "spearman": 0.7681791423034687
+      }
+    },
     "evaluation_time": 62.5
   },
   "mteb_version": "0.0.2",
results/text-embedding-ada-002/AmazonReviewsClassification.json CHANGED
@@ -17,6 +17,13 @@
       "f1": 0.429266381272088,
       "f1_stderr": 0.018528639613560428,
       "main_score": 0.4478000000000001
+    },
+    "fr": {
+      "accuracy": 0.43764000000000003,
+      "accuracy_stderr": 0.01848108221939397,
+      "f1": 0.41730153006471193,
+      "f1_stderr": 0.01990112539123184,
+      "main_score": 0.43764000000000003
     }
   },
   "validation": {
results/text-embedding-ada-002/MTOPDomainClassification.json CHANGED
@@ -10,6 +10,13 @@
       "f1_stderr": 0.00628709766087384,
       "main_score": 0.9212722298221614
     },
+    "fr": {
+      "accuracy": 0.8937676166614468,
+      "accuracy_stderr": 0.005242086051464493,
+      "f1": 0.8918839551737154,
+      "f1_stderr": 0.005827912335190611,
+      "main_score": 0.8937676166614468
+    },
     "evaluation_time": 81.07
   }
 }
results/text-embedding-ada-002/MTOPIntentClassification.json CHANGED
@@ -10,6 +10,13 @@
       "f1_stderr": 0.009254421795011528,
       "main_score": 0.6468308253533971
     },
+    "fr": {
+      "accuracy": 0.6445349201378014,
+      "accuracy_stderr": 0.02596000279882305,
+      "f1": 0.45329324493064826,
+      "f1_stderr": 0.019660718408695918,
+      "main_score": 0.6445349201378014
+    },
     "evaluation_time": 186.91
   }
 }
results/text-embedding-ada-002/MassiveIntentClassification.json CHANGED
@@ -17,6 +17,13 @@
       "f1": 0.6759342809572279,
       "f1_stderr": 0.01495992009622014,
       "main_score": 0.701546738399462
+    },
+    "fr": {
+      "accuracy": 0.654203093476799,
+      "accuracy_stderr": 0.017877617858126905,
+      "f1": 0.6233437055359021,
+      "f1_stderr": 0.016670689583976186,
+      "main_score": 0.654203093476799
     }
   },
   "validation": {
results/text-embedding-ada-002/MassiveScenarioClassification.json CHANGED
@@ -17,6 +17,13 @@
       "f1": 0.7466678632951032,
       "f1_stderr": 0.01143026967111036,
       "main_score": 0.753261600537996
+    },
+    "fr": {
+      "accuracy": 0.7110961667787492,
+      "accuracy_stderr": 0.014012929591756285,
+      "f1": 0.7052417666719227,
+      "f1_stderr": 0.013563018661185906,
+      "main_score": 0.7110961667787492
     }
   },
   "validation": {
results/text-embedding-ada-002/STS22.json CHANGED
@@ -45,6 +45,20 @@
         "pearson": 0.6084112755525339,
         "spearman": 0.64498311459473
       }
+    },
+    "fr": {
+      "cos_sim": {
+        "pearson": 0.7941725570177687,
+        "spearman": 0.810931445449386
+      },
+      "euclidean": {
+        "pearson": 0.8069952449579254,
+        "spearman": 0.810931445449386
+      },
+      "manhattan": {
+        "pearson": 0.8064946486457454,
+        "spearman": 0.8111529599734582
+      }
     }
   }
 }
results/xlm-roberta-base/MassiveIntentClassification.json CHANGED
@@ -24,6 +24,13 @@
       "f1": 0.4202125805260968,
       "f1_stderr": 0.010769282318607538,
       "main_score": 0.45117686617350367
+    },
+    "fr": {
+      "accuracy": 0.13581035642232683,
+      "accuracy_stderr": 0.016491524857694173,
+      "f1": 0.14614446238627393,
+      "f1_stderr": 0.013927468848973678,
+      "main_score": 0.13581035642232683
     }
   },
   "validation": {
results/xlm-roberta-base/MassiveScenarioClassification.json CHANGED
@@ -24,6 +24,13 @@
       "f1": 0.46097644872017873,
       "f1_stderr": 0.022149804025228082,
       "main_score": 0.4735036987222595
+    },
+    "fr": {
+      "accuracy": 0.23214525891055815,
+      "accuracy_stderr": 0.032492086366805804,
+      "f1": 0.2137516225585256,
+      "f1_stderr": 0.03576007230430259,
+      "main_score": 0.23214525891055815
     }
   },
   "validation": {