mbrack and barthfab committed
Commit 2a28d05
Parent: 725cd80

Fix Table (#2)


- Fix Table (8e4ad98f4e356d6b0259d76b36904cc7995ed945)


Co-authored-by: Fabio Barth <barthfab@users.noreply.huggingface.co>

Files changed (1)
  1. README.md +14 -14
README.md CHANGED
@@ -84,26 +84,26 @@ Currently, we are working on more suitable benchmarks for Spanish, French, Germa
 
 | | arc_challenge | belebele | hellaswag | mmlu | truthfulqa | avg |
 |:-------------------------------------|----------------:|-----------:|------------:|---------:|-------------:|---------:|
-| occiglot/occiglot-7b-eu5 | 0.530717 | 0.726667 | 0.789882 | 0.531904 | 0.403678 | 0.59657 |
-| occiglot/occiglot-7b-eu5-instruct | 0.558874 | 0.746667 | 0.799841 | 0.535109 | 0.449034 | 0.617905 |
-| occiglot/occiglot-7b-fr-en | 0.568259 | 0.771111 | 0.804919 | 0.570716 | 0.394726 | 0.621947 |
-| occiglot/occiglot-7b-fr-en-instruct | 0.586177 | 0.794444 | 0.808305 | 0.569862 | 0.474064 | 0.646571 |
-| OpenLLM-France/Claire-Mistral-7B-0.1 | 0.59727 | 0.817778 | 0.827126 | 0.600912 | 0.415906 | 0.651798 |
-| mistralai/Mistral-7B-v0.1 | 0.612628 | 0.844444 | 0.834097 | 0.624555 | 0.426201 | 0.668385 |
-| mistralai/Mistral-7B-Instruct-v0.2 | 0.637372 | 0.824444 | 0.846345 | 0.59201 | 0.668116 | 0.713657 |
+| Occiglot-7b-eu5 | 0.530717 | 0.726667 | 0.789882 | 0.531904 | 0.403678 | 0.59657 |
+| Occiglot-7b-eu5-instruct | 0.558874 | 0.746667 | 0.799841 | 0.535109 | 0.449034 | 0.617905 |
+| Occiglot-7b-fr-en | 0.568259 | 0.771111 | 0.804919 | 0.570716 | 0.394726 | 0.621947 |
+| Occiglot-7b-fr-en-instruct | 0.586177 | 0.794444 | 0.808305 | 0.569862 | 0.474064 | 0.646571 |
+| Claire-Mistral-7B-0.1 | 0.59727 | 0.817778 | 0.827126 | 0.600912 | 0.415906 | 0.651798 |
+| Mistral-7B-v0.1 | 0.612628 | 0.844444 | 0.834097 | 0.624555 | 0.426201 | 0.668385 |
+| Mistral-7B-Instruct-v0.2 | 0.637372 | 0.824444 | 0.846345 | 0.59201 | 0.668116 | 0.713657 |
 
 
 ### French
 
 | | arc_challenge_fr | belebele_fr | hellaswag_fr | mmlu_fr | truthfulqa_fr | avg |
 |:-------------------------------------|-------------------:|--------------:|---------------:|----------:|----------------:|---------:|
-| occiglot/occiglot-7b-eu5 | 0.506416 | 0.675556 | 0.712358 | 0.495684 | 0.23507 | 0.525017 |
-| occiglot/occiglot-7b-eu5-instruct | 0.541488 | 0.7 | 0.724245 | 0.499122 | 0.306226 | 0.554216 |
-| occiglot/occiglot-7b-fr-en | 0.532934 | 0.706667 | 0.718891 | 0.51333 | 0.242694 | 0.542903 |
-| occiglot/occiglot-7b-fr-en-instruct | 0.542344 | 0.752222 | 0.72553 | 0.52051 | 0.29479 | 0.567079 |
-| OpenLLM-France/Claire-Mistral-7B-0.1 | 0.486741 | 0.694444 | 0.642964 | 0.479566 | 0.271919 | 0.515127 |
-| mistralai/Mistral-7B-v0.1 | 0.525235 | 0.776667 | 0.66481 | 0.543121 | 0.280813 | 0.558129 |
-| mistralai/Mistral-7B-Instruct-v0.2 | 0.551754 | 0.758889 | 0.67916 | 0.506837 | 0.382465 | 0.575821 |
+| Occiglot-7b-eu5 | 0.506416 | 0.675556 | 0.712358 | 0.495684 | 0.23507 | 0.525017 |
+| Occiglot-7b-eu5-instruct | 0.541488 | 0.7 | 0.724245 | 0.499122 | 0.306226 | 0.554216 |
+| Occiglot-7b-fr-en | 0.532934 | 0.706667 | 0.718891 | 0.51333 | 0.242694 | 0.542903 |
+| Occiglot-7b-fr-en-instruct | 0.542344 | 0.752222 | 0.72553 | 0.52051 | 0.29479 | 0.567079 |
+| Claire-Mistral-7B-0.1 | 0.486741 | 0.694444 | 0.642964 | 0.479566 | 0.271919 | 0.515127 |
+| Mistral-7B-v0.1 | 0.525235 | 0.776667 | 0.66481 | 0.543121 | 0.280813 | 0.558129 |
+| Mistral-7B-Instruct-v0.2 | 0.551754 | 0.758889 | 0.67916 | 0.506837 | 0.382465 | 0.575821 |
 
 </details>
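
The numeric values are identical between the removed and added rows, so this commit only shortens the model names in the first column; the `avg` cells themselves match the plain unweighted mean of the five per-benchmark scores. A minimal sketch checking two rows against the reported averages (values copied from the diff above; `row_avg` is an illustrative helper, not part of the repository):

```python
# Sanity check: each `avg` cell is the unweighted mean of the five
# benchmark scores in its row. Values are copied from the tables in
# the diff above; `row_avg` is an illustrative helper, not repo code.

def row_avg(scores: list[float]) -> float:
    """Unweighted mean over one row's benchmark scores."""
    return sum(scores) / len(scores)

# Occiglot-7b-eu5, English table:
# arc_challenge, belebele, hellaswag, mmlu, truthfulqa
en = [0.530717, 0.726667, 0.789882, 0.531904, 0.403678]
assert round(row_avg(en), 6) == 0.59657  # reported avg: 0.59657

# Occiglot-7b-eu5, French table
fr = [0.506416, 0.675556, 0.712358, 0.495684, 0.23507]
assert round(row_avg(fr), 6) == 0.525017  # reported avg: 0.525017
```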