Files changed (1) hide show
  1. README.md +14 -14
README.md CHANGED
@@ -84,25 +84,25 @@ Currently, we are working on more suitable benchmarks for Spanish, French, Germa
84
 
85
  | | arc_challenge | belebele | hellaswag | mmlu | truthfulqa | avg |
86
  |:-------------------------------------|----------------:|-----------:|------------:|---------:|-------------:|---------:|
87
- | occiglot/occiglot-7b-eu5 | 0.530717 | 0.726667 | 0.789882 | 0.531904 | 0.403678 | 0.59657 |
88
- | occiglot/occiglot-7b-eu5-instruct | 0.558874 | 0.746667 | 0.799841 | 0.535109 | 0.449034 | 0.617905 |
89
- | occiglot/occiglot-7b-es-en | 0.543515 | 0.697778 | 0.788289 | 0.548355 | 0.390109 | 0.593609 |
90
- | occiglot/occiglot-7b-es-en-instruct | 0.552048 | 0.736667 | 0.797451 | 0.557328 | 0.435042 | 0.615707 |
91
- | clibrain/lince-mistral-7b-it-es | 0.624573 | 0.824444 | 0.838578 | 0.600627 | 0.433202 | 0.664285 |
92
- | mistralai/Mistral-7B-v0.1 | 0.612628 | 0.844444 | 0.834097 | 0.624555 | 0.426201 | 0.668385 |
93
- | mistralai/Mistral-7B-Instruct-v0.2 | 0.637372 | 0.824444 | 0.846345 | 0.59201 | 0.668116 | 0.713657 |
94
 
95
  ### Spanish
96
 
97
  | | arc_challenge_es | belebele_es | hellaswag_es | mmlu_es | truthfulqa_es | avg |
98
  |:-------------------------------------|-------------------:|--------------:|---------------:|----------:|----------------:|---------:|
99
- | occiglot/occiglot-7b-eu5 | 0.508547 | 0.676667 | 0.725411 | 0.499325 | 0.25602 | 0.533194 |
100
- | occiglot/occiglot-7b-eu5-instruct | 0.535043 | 0.68 | 0.737039 | 0.503525 | 0.285171 | 0.548155 |
101
- | occiglot/occiglot-7b-es-en | 0.529915 | 0.627778 | 0.72253 | 0.512749 | 0.243346 | 0.527264 |
102
- | occiglot/occiglot-7b-es-en-instruct | 0.545299 | 0.636667 | 0.734372 | 0.524374 | 0.257288 | 0.5396 |
103
- | clibrain/lince-mistral-7b-it-es | 0.52906 | 0.721111 | 0.687967 | 0.512749 | 0.285171 | 0.547212 |
104
- | mistralai/Mistral-7B-v0.1 | 0.528205 | 0.747778 | 0.672712 | 0.544023 | 0.281369 | 0.554817 |
105
- | mistralai/Mistral-7B-Instruct-v0.2 | 0.54188 | 0.73 | 0.685406 | 0.511699 | 0.373891 | 0.568575 |
106
 
107
 
108
 
 
84
 
85
  | | arc_challenge | belebele | hellaswag | mmlu | truthfulqa | avg |
86
  |:-------------------------------------|----------------:|-----------:|------------:|---------:|-------------:|---------:|
87
+ | Occiglot-7b-eu5 | 0.530717 | 0.726667 | 0.789882 | 0.531904 | 0.403678 | 0.59657 |
88
+ | Occiglot-7b-eu5-instruct | 0.558874 | 0.746667 | 0.799841 | 0.535109 | 0.449034 | 0.617905 |
89
+ | Occiglot-7b-es-en | 0.543515 | 0.697778 | 0.788289 | 0.548355 | 0.390109 | 0.593609 |
90
+ | Occiglot-7b-es-en-instruct | 0.552048 | 0.736667 | 0.797451 | 0.557328 | 0.435042 | 0.615707 |
91
+ | Lince-mistral-7b-it-es | 0.624573 | 0.824444 | 0.838578 | 0.600627 | 0.433202 | 0.664285 |
92
+ | Mistral-7B-v0.1 | 0.612628 | 0.844444 | 0.834097 | 0.624555 | 0.426201 | 0.668385 |
93
+ | Mistral-7B-Instruct-v0.2 | 0.637372 | 0.824444 | 0.846345 | 0.59201 | 0.668116 | 0.713657 |
94
 
95
  ### Spanish
96
 
97
  | | arc_challenge_es | belebele_es | hellaswag_es | mmlu_es | truthfulqa_es | avg |
98
  |:-------------------------------------|-------------------:|--------------:|---------------:|----------:|----------------:|---------:|
99
+ | Occiglot-7b-eu5 | 0.508547 | 0.676667 | 0.725411 | 0.499325 | 0.25602 | 0.533194 |
100
+ | Occiglot-7b-eu5-instruct | 0.535043 | 0.68 | 0.737039 | 0.503525 | 0.285171 | 0.548155 |
101
+ | Occiglot-7b-es-en | 0.529915 | 0.627778 | 0.72253 | 0.512749 | 0.243346 | 0.527264 |
102
+ | Occiglot-7b-es-en-instruct | 0.545299 | 0.636667 | 0.734372 | 0.524374 | 0.257288 | 0.5396 |
103
+ | Lince-mistral-7b-it-es | 0.52906 | 0.721111 | 0.687967 | 0.512749 | 0.285171 | 0.547212 |
104
+ | Mistral-7B-v0.1 | 0.528205 | 0.747778 | 0.672712 | 0.544023 | 0.281369 | 0.554817 |
105
+ | Mistral-7B-Instruct-v0.2 | 0.54188 | 0.73 | 0.685406 | 0.511699 | 0.373891 | 0.568575 |
106
 
107
 
108