barthfab committed on
Commit
41c9899
1 Parent(s): 50cc49b
Files changed (1) hide show
  1. README.md +14 -14
README.md CHANGED
@@ -96,26 +96,26 @@ Currently, we are working on more suitable benchmarks for Spanish, French, Germa
96
 
97
  | | arc_challenge | belebele | hellaswag | mmlu | truthfulqa | avg |
98
  |:-------------------------------------|----------------:|-----------:|------------:|---------:|-------------:|---------:|
99
- | occiglot/occiglot-7b-eu5 | 0.530717 | 0.726667 | 0.789882 | 0.531904 | 0.403678 | 0.59657 |
100
- | occiglot/occiglot-7b-eu5-instruct | 0.558874 | 0.746667 | 0.799841 | 0.535109 | 0.449034 | 0.617905 |
101
- | occiglot/occiglot-7b-de-en | 0.556314 | 0.791111 | 0.803824 | 0.568438 | 0.423251 | 0.628587 |
102
- | occiglot/occiglot-7b-de-en-instruct | 0.604096 | 0.812222 | 0.80004 | 0.570574 | 0.493807 | 0.656148 |
103
- | LeoLM/leo-mistral-hessianai-7b | 0.522184 | 0.736667 | 0.777833 | 0.538812 | 0.429248 | 0.600949 |
104
- | mistralai/Mistral-7B-v0.1 | 0.612628 | 0.844444 | 0.834097 | 0.624555 | 0.426201 | 0.668385 |
105
- | mistralai/Mistral-7B-Instruct-v0.2 | 0.637372 | 0.824444 | 0.846345 | 0.59201 | 0.668116 | 0.713657 |
106
 
107
 
108
  ### German
109
 
110
  | | arc_challenge_de | belebele_de | hellaswag_de | mmlu_de | truthfulqa_de | avg |
111
  |:-------------------------------------|-------------------:|--------------:|---------------:|----------:|----------------:|---------:|
112
- | occiglot/occiglot-7b-eu5 | 0.493584 | 0.646667 | 0.666631 | 0.483406 | 0.251269 | 0.508311 |
113
- | occiglot/occiglot-7b-eu5-instruct | 0.529512 | 0.667778 | 0.685205 | 0.488234 | 0.286802 | 0.531506 |
114
- | occiglot/occiglot-7b-de-en | 0.50556 | 0.743333 | 0.67421 | 0.514633 | 0.26269 | 0.540085 |
115
- | occiglot/occiglot-7b-de-en-instruct | 0.54491 | 0.772222 | 0.688407 | 0.515915 | 0.310914 | 0.566474 |
116
- | LeoLM/leo-mistral-hessianai-7b | 0.474765 | 0.691111 | 0.682109 | 0.488309 | 0.252538 | 0.517766 |
117
- | mistralai/Mistral-7B-v0.1 | 0.476476 | 0.738889 | 0.610589 | 0.529567 | 0.284264 | 0.527957 |
118
- | mistralai/Mistral-7B-Instruct-v0.2 | 0.485885 | 0.688889 | 0.622438 | 0.501961 | 0.376904 | 0.535215 |
119
 
120
  </details>
121
 
 
96
 
97
  | | arc_challenge | belebele | hellaswag | mmlu | truthfulqa | avg |
98
  |:-------------------------------------|----------------:|-----------:|------------:|---------:|-------------:|---------:|
99
+ | Occiglot-7b-eu5 | 0.530717 | 0.726667 | 0.789882 | 0.531904 | 0.403678 | 0.59657 |
100
+ | Occiglot-7b-eu5-instruct | 0.558874 | 0.746667 | 0.799841 | 0.535109 | 0.449034 | 0.617905 |
101
+ | Occiglot-7b-de-en | 0.556314 | 0.791111 | 0.803824 | 0.568438 | 0.423251 | 0.628587 |
102
+ | Occiglot-7b-de-en-instruct | 0.604096 | 0.812222 | 0.80004 | 0.570574 | 0.493807 | 0.656148 |
103
+ | Leo-mistral-hessianai-7b | 0.522184 | 0.736667 | 0.777833 | 0.538812 | 0.429248 | 0.600949 |
104
+ | Mistral-7B-v0.1 | 0.612628 | 0.844444 | 0.834097 | 0.624555 | 0.426201 | 0.668385 |
105
+ | Mistral-7B-Instruct-v0.2 | 0.637372 | 0.824444 | 0.846345 | 0.59201 | 0.668116 | 0.713657 |
106
 
107
 
108
  ### German
109
 
110
  | | arc_challenge_de | belebele_de | hellaswag_de | mmlu_de | truthfulqa_de | avg |
111
  |:-------------------------------------|-------------------:|--------------:|---------------:|----------:|----------------:|---------:|
112
+ | Occiglot-7b-eu5 | 0.493584 | 0.646667 | 0.666631 | 0.483406 | 0.251269 | 0.508311 |
113
+ | Occiglot-7b-eu5-instruct | 0.529512 | 0.667778 | 0.685205 | 0.488234 | 0.286802 | 0.531506 |
114
+ | Occiglot-7b-de-en | 0.50556 | 0.743333 | 0.67421 | 0.514633 | 0.26269 | 0.540085 |
115
+ | Occiglot-7b-de-en-instruct | 0.54491 | 0.772222 | 0.688407 | 0.515915 | 0.310914 | 0.566474 |
116
+ | Leo-mistral-hessianai-7b | 0.474765 | 0.691111 | 0.682109 | 0.488309 | 0.252538 | 0.517766 |
117
+ | Mistral-7B-v0.1 | 0.476476 | 0.738889 | 0.610589 | 0.529567 | 0.284264 | 0.527957 |
118
+ | Mistral-7B-Instruct-v0.2 | 0.485885 | 0.688889 | 0.622438 | 0.501961 | 0.376904 | 0.535215 |
119
 
120
  </details>
121