picocreator committed on
Commit
ded8a93
1 Parent(s): 4ecf101

updated results build

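The notebook's printed outputs in this diff (counting `results.json` files, grouping them per model, writing `compiled-lm-eval-results.json` and the `bf16-*` summary CSVs) suggest a compile step roughly like the sketch below. This is a hedged illustration only, not the notebook's actual code: the directory layout, JSON keys, and helper logic are assumptions.

```python
# Hedged sketch of the compile step implied by the outputs in this diff:
# scan for lm-eval results.json files, group them per model, then write
# compiled-lm-eval-results.json plus a summary CSV. Paths and keys are assumed.
import glob
import json

import pandas as pd

result_files = glob.glob("lm-eval-output/**/results.json", recursive=True)
print(f"Found {len(result_files)} results.json files")

compiled = {}
for path in result_files:
    with open(path) as f:
        data = json.load(f)
    # Assume the model name is recoverable from the path or the run metadata.
    model = data.get("model_name", path.split("/")[1] if "/" in path else path)
    compiled.setdefault(model, []).append(data.get("results", {}))

print(f"Found {len(compiled)} models")
print("Models: ")
print(list(compiled.keys()))

with open("compiled-lm-eval-results.json", "w") as f:
    json.dump(compiled, f)
print("Saved to compiled-lm-eval-results.json")

# Flatten one accuracy metric per model into a summary table (illustrative only).
rows = [
    {"model": m, "avg_acc": runs[0].get("xcopa", {}).get("acc,none") if runs else None}
    for m, runs in compiled.items()
]
pd.DataFrame(rows).to_csv("summary/bf16-multilang-summary.csv", index=False)
```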
compile-results.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 47,
6
  "metadata": {},
7
  "outputs": [
8
  {
@@ -11,10 +11,10 @@
11
  "text": [
12
  "Defaulting to user installation because normal site-packages is not writeable\n",
13
  "Requirement already satisfied: pandas in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (2.2.0)\n",
14
- "Requirement already satisfied: tzdata>=2022.7 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n",
15
- "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2.8.2)\n",
16
- "Requirement already satisfied: pytz>=2020.1 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n",
17
  "Requirement already satisfied: numpy<2,>=1.22.4 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (1.26.1)\n",
18
  "Requirement already satisfied: six>=1.5 in /Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/site-packages (from python-dateutil>=2.8.2->pandas) (1.15.0)\n",
19
  "\u001b[33mWARNING: You are using pip version 21.2.4; however, version 24.0 is available.\n",
20
  "You should consider upgrading via the '/Library/Developer/CommandLineTools/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\n"
@@ -36,14 +36,14 @@
36
  },
37
  {
38
  "cell_type": "code",
39
- "execution_count": 48,
40
  "metadata": {},
41
  "outputs": [
42
  {
43
  "name": "stdout",
44
  "output_type": "stream",
45
  "text": [
46
- "Found 2217 results.json files\n"
47
  ]
48
  }
49
  ],
@@ -71,14 +71,14 @@
71
  },
72
  {
73
  "cell_type": "code",
74
- "execution_count": 49,
75
  "metadata": {},
76
  "outputs": [
77
  {
78
  "name": "stdout",
79
  "output_type": "stream",
80
  "text": [
81
- "Processed example: {'name': 'bigscience/bloom-7b1', 'config': {'dtype=bfloat16,trust_remote_code=True': {'confStr': 'dtype=bfloat16,trust_remote_code=True', 'confObj': {'dtype': 'bfloat16', 'trust_remote_code': 'True'}, 'results': {'xcopa': {'acc,none': 0.5709090909090908, 'acc_stderr,none': 0.06135942275478038, 'alias': 'xcopa'}, 'xcopa_et': {'acc,none': 0.482, 'acc_stderr,none': 0.02236856511738799, 'alias': ' - xcopa_et'}, 'xcopa_ht': {'acc,none': 0.516, 'acc_stderr,none': 0.0223716109825804, 'alias': ' - xcopa_ht'}, 'xcopa_id': {'acc,none': 0.702, 'acc_stderr,none': 0.020475118092988978, 'alias': ' - xcopa_id'}, 'xcopa_it': {'acc,none': 0.528, 'acc_stderr,none': 0.022347949832668093, 'alias': ' - xcopa_it'}, 'xcopa_qu': {'acc,none': 0.512, 'acc_stderr,none': 0.02237662679792717, 'alias': ' - xcopa_qu'}, 'xcopa_sw': {'acc,none': 0.518, 'acc_stderr,none': 0.02236856511738799, 'alias': ' - xcopa_sw'}, 'xcopa_ta': {'acc,none': 0.592, 'acc_stderr,none': 0.02200091089387719, 'alias': ' - xcopa_ta'}, 'xcopa_th': {'acc,none': 0.552, 'acc_stderr,none': 0.022261697292270132, 'alias': ' - xcopa_th'}, 'xcopa_tr': {'acc,none': 0.512, 'acc_stderr,none': 0.02237662679792717, 'alias': ' - xcopa_tr'}, 'xcopa_vi': {'acc,none': 0.716, 'acc_stderr,none': 0.02018670369357085, 'alias': ' - xcopa_vi'}, 'xcopa_zh': {'acc,none': 0.65, 'acc_stderr,none': 0.021352091786223104, 'alias': ' - xcopa_zh'}, 'xnli': {'acc,none': 0.41204819277108434, 'acc_stderr,none': 0.051535476594892576, 'alias': 'xnli'}, 'xnli_ar': {'acc,none': 0.3377510040160643, 'acc_stderr,none': 0.009479742273956477, 'alias': ' - xnli_ar'}, 'xnli_bg': {'acc,none': 0.3779116465863454, 'acc_stderr,none': 0.009718712281227459, 'alias': ' - xnli_bg'}, 'xnli_de': {'acc,none': 0.41365461847389556, 'acc_stderr,none': 0.009871502159099368, 'alias': ' - xnli_de'}, 'xnli_el': {'acc,none': 0.3650602409638554, 'acc_stderr,none': 0.009650194822749637, 'alias': ' - xnli_el'}, 'xnli_en': {'acc,none': 0.5261044176706827, 'acc_stderr,none': 0.01000840465166064, 'alias': ' - xnli_en'}, 'xnli_es': {'acc,none': 0.4879518072289157, 'acc_stderr,none': 0.010019162857624487, 'alias': ' - xnli_es'}, 'xnli_fr': {'acc,none': 0.478714859437751, 'acc_stderr,none': 0.010012987604500423, 'alias': ' - xnli_fr'}, 'xnli_hi': {'acc,none': 0.4666666666666667, 'acc_stderr,none': 0.00999977679318763, 'alias': ' - xnli_hi'}, 'xnli_ru': {'acc,none': 0.43253012048192774, 'acc_stderr,none': 0.009930409027139453, 'alias': ' - xnli_ru'}, 'xnli_sw': {'acc,none': 0.3855421686746988, 'acc_stderr,none': 0.009755949341224318, 'alias': ' - xnli_sw'}, 'xnli_th': {'acc,none': 0.3437751004016064, 'acc_stderr,none': 0.009520310502882936, 'alias': ' - xnli_th'}, 'xnli_tr': {'acc,none': 0.3522088353413655, 'acc_stderr,none': 0.009574259292495757, 'alias': ' - xnli_tr'}, 'xnli_ur': {'acc,none': 0.42289156626506025, 'acc_stderr,none': 0.009902179034797438, 'alias': ' - xnli_ur'}, 'xnli_vi': {'acc,none': 0.44497991967871486, 'acc_stderr,none': 0.009961210239024633, 'alias': ' - xnli_vi'}, 'xnli_zh': {'acc,none': 0.3449799196787149, 'acc_stderr,none': 0.009528219800053311, 'alias': ' - xnli_zh'}, 'pawsx': {'acc,none': 0.5078571428571429, 'acc_stderr,none': 0.03988534011535243, 'alias': 'pawsx'}, 'paws_de': {'acc,none': 0.5175, 'acc_stderr,none': 0.011176284251254179, 'alias': ' - paws_de'}, 'paws_en': {'acc,none': 0.4145, 'acc_stderr,none': 0.011018419931591767, 'alias': ' - paws_en'}, 'paws_es': {'acc,none': 0.437, 'acc_stderr,none': 0.011094009127418984, 'alias': ' - paws_es'}, 'paws_fr': 
{'acc,none': 0.5435, 'acc_stderr,none': 0.011140733053371404, 'alias': ' - paws_fr'}, 'paws_ja': {'acc,none': 0.5575, 'acc_stderr,none': 0.01110894141174761, 'alias': ' - paws_ja'}, 'paws_ko': {'acc,none': 0.552, 'acc_stderr,none': 0.011122493197456285, 'alias': ' - paws_ko'}, 'paws_zh': {'acc,none': 0.533, 'acc_stderr,none': 0.01115875256825067, 'alias': ' - paws_zh'}, 'lambada_multilingual': {'perplexity,none': 131.45396740665825, 'perplexity_stderr,none': 95.28024178884175, 'acc,none': 0.38490199883562976, 'acc_stderr,none': 0.07608898792977997, 'alias': 'lambada_multilingual'}, 'lambada_openai_mt_de': {'perplexity,none': 370.91952810475857, 'perplexity_stderr,none': 24.98299339282566, 'acc,none': 0.23015718998641568, 'acc_stderr,none': 0.0058644241714399855, 'alias': ' - lambada_openai_mt_de'}, 'lambada_openai_mt_en': {'perplexity,none': 6.583236525584539, 'perplexity_stderr,none': 0.17481189179976453, 'acc,none': 0.5717058024451775, 'acc_stderr,none': 0.0068939712541951454, 'alias': ' - lambada_openai_mt_en'}, 'lambada_openai_mt_es': {'perplexity,none': 51.02874715706533, 'perplexity_stderr,none': 2.6341920857292744, 'acc,none': 0.36638851154667185, 'acc_stderr,none': 0.0067126579546010565, 'alias': ' - lambada_openai_mt_es'}, 'lambada_openai_mt_fr': {'perplexity,none': 29.56217917543056, 'perplexity_stderr,none': 1.5411073949753211, 'acc,none': 0.4513875412381137, 'acc_stderr,none': 0.0069329758883686235, 'alias': ' - lambada_openai_mt_fr'}, 'lambada_openai_mt_it': {'perplexity,none': 199.1761460704524, 'perplexity_stderr,none': 13.648756866456297, 'acc,none': 0.30487094896176986, 'acc_stderr,none': 0.006413613926848421, 'alias': ' - lambada_openai_mt_it'}, 'xwinograd': {'acc,none': 0.7442121825129242, 'acc_stderr,none': 0.06414679137553342, 'alias': 'xwinograd'}, 'xwinograd_en': {'acc,none': 0.8219354838709677, 'acc_stderr,none': 0.007935777723887321, 'alias': ' - xwinograd_en'}, 'xwinograd_fr': {'acc,none': 0.6987951807228916, 'acc_stderr,none': 0.0506639425494172, 'alias': ' - xwinograd_fr'}, 'xwinograd_jp': {'acc,none': 0.6037539103232534, 'acc_stderr,none': 0.015802642616557255, 'alias': ' - xwinograd_jp'}, 'xwinograd_pt': {'acc,none': 0.7680608365019012, 'acc_stderr,none': 0.026075593860304693, 'alias': ' - xwinograd_pt'}, 'xwinograd_ru': {'acc,none': 0.5714285714285714, 'acc_stderr,none': 0.02792722339076032, 'alias': ' - xwinograd_ru'}, 'xwinograd_zh': {'acc,none': 0.7559523809523809, 'acc_stderr,none': 0.01915139944664688, 'alias': ' - xwinograd_zh'}, 'xstorycloze': {'acc,none': 0.5927441188857469, 'acc_stderr,none': 0.05262352730974911, 'alias': 'xstorycloze'}, 'xstorycloze_ar': {'acc,none': 0.5883520847121112, 'acc_stderr,none': 0.01266464832921408, 'alias': ' - xstorycloze_ar'}, 'xstorycloze_en': {'acc,none': 0.7081403044341495, 'acc_stderr,none': 0.01169925603764938, 'alias': ' - xstorycloze_en'}, 'xstorycloze_es': {'acc,none': 0.6598279285241562, 'acc_stderr,none': 0.012192034998028832, 'alias': ' - xstorycloze_es'}, 'xstorycloze_eu': {'acc,none': 0.57114493712773, 'acc_stderr,none': 0.012736202713147777, 'alias': ' - xstorycloze_eu'}, 'xstorycloze_hi': {'acc,none': 0.6048974189278623, 'acc_stderr,none': 0.012580772976133262, 'alias': ' - xstorycloze_hi'}, 'xstorycloze_id': {'acc,none': 0.6419589675711449, 'acc_stderr,none': 0.012337624883487575, 'alias': ' - xstorycloze_id'}, 'xstorycloze_my': {'acc,none': 0.48378557246856385, 'acc_stderr,none': 0.012860357805055867, 'alias': ' - xstorycloze_my'}, 'xstorycloze_ru': {'acc,none': 0.5268034414295168, 'acc_stderr,none': 
0.012848623899505765, 'alias': ' - xstorycloze_ru'}, 'xstorycloze_sw': {'acc,none': 0.5413633355393779, 'acc_stderr,none': 0.012823020340169822, 'alias': ' - xstorycloze_sw'}, 'xstorycloze_te': {'acc,none': 0.5744540039708802, 'acc_stderr,none': 0.012723670419166326, 'alias': ' - xstorycloze_te'}, 'xstorycloze_zh': {'acc,none': 0.6194573130377233, 'acc_stderr,none': 0.012494500786685344, 'alias': ' - xstorycloze_zh'}}, 'groups': {'xcopa': {'acc,none': 0.5709090909090908, 'acc_stderr,none': 0.06135942275478038, 'alias': 'xcopa'}, 'xnli': {'acc,none': 0.41204819277108434, 'acc_stderr,none': 0.051535476594892576, 'alias': 'xnli'}, 'pawsx': {'acc,none': 0.5078571428571429, 'acc_stderr,none': 0.03988534011535243, 'alias': 'pawsx'}, 'lambada_multilingual': {'perplexity,none': 131.45396740665825, 'perplexity_stderr,none': 95.28024178884175, 'acc,none': 0.38490199883562976, 'acc_stderr,none': 0.07608898792977997, 'alias': 'lambada_multilingual'}, 'xwinograd': {'acc,none': 0.7442121825129242, 'acc_stderr,none': 0.06414679137553342, 'alias': 'xwinograd'}, 'xstorycloze': {'acc,none': 0.5927441188857469, 'acc_stderr,none': 0.05262352730974911, 'alias': 'xstorycloze'}}}}}\n"
82
  ]
83
  }
84
  ],
@@ -156,16 +156,16 @@
156
  },
157
  {
158
  "cell_type": "code",
159
- "execution_count": 50,
160
  "metadata": {},
161
  "outputs": [
162
  {
163
  "name": "stdout",
164
  "output_type": "stream",
165
  "text": [
166
- "Found 36 models\n",
167
  "Models: \n",
168
- "['bigscience/bloom-7b1', 'togethercomputer/RedPajama-INCITE-7B-Base', 'mistralai/Mistral-7B-v0.1', 'mosaicml/mpt-7b-instruct', 'mosaicml/mpt-7b', 'mosaicml/mpt-7b-chat', 'bigscience/bloomz-7b1-mt', 'bigscience/bloomz-7b1', 'EleutherAI/pythia-2.8b', 'EleutherAI/pythia-1.4b', 'EleutherAI/gpt-j-6b', 'EleutherAI/pythia-6.9b', 'microsoft/phi-1_5', 'microsoft/phi-2', 'microsoft/phi-1', 'allenai/OLMo-7B', 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'TinyLlama/TinyLlama-1.1B-Chat-v1.0', 'RWKV/rwkv-5-world-1b5', 'RWKV/rwkv-5-world-3b', 'RWKV/rwkv-4-world-3b', 'RWKV/rwkv-4-world-1b5', 'RWKV/rwkv-4-world-7b', 'RWKV/HF_v5-Eagle-7B', 'togethercomputer/RedPajama-INCITE-7B-Instruct', 'togethercomputer/RedPajama-INCITE-7B-Chat', 'facebook/opt-2.7b', 'facebook/opt-6.7b', 'facebook/opt-1.3b', 'tiiuae/falcon-7b-instruct', 'tiiuae/falcon-rw-1b', 'tiiuae/falcon-rw-7b', 'tiiuae/falcon-7b', 'huggyllama/llama-7b', 'meta-llama/Llama-2-7b-chat-hf', 'meta-llama/Llama-2-7b-hf']\n",
169
  "Saved to compiled-lm-eval-results.json\n"
170
  ]
171
  }
@@ -199,7 +199,7 @@
199
  },
200
  {
201
  "cell_type": "code",
202
- "execution_count": 56,
203
  "metadata": {},
204
  "outputs": [
205
  {
@@ -233,22 +233,6 @@
233
  " <tbody>\n",
234
  " <tr>\n",
235
  " <th>0</th>\n",
236
- " <td>bigscience/bloom-7b1</td>\n",
237
- " <td>0.570909</td>\n",
238
- " <td>0.061359</td>\n",
239
- " <td>0.570909</td>\n",
240
- " <td>0.061359</td>\n",
241
- " </tr>\n",
242
- " <tr>\n",
243
- " <th>1</th>\n",
244
- " <td>togethercomputer/RedPajama-INCITE-7B-Base</td>\n",
245
- " <td>0.525455</td>\n",
246
- " <td>0.036407</td>\n",
247
- " <td>0.525455</td>\n",
248
- " <td>0.036407</td>\n",
249
- " </tr>\n",
250
- " <tr>\n",
251
- " <th>2</th>\n",
252
  " <td>mistralai/Mistral-7B-v0.1</td>\n",
253
  " <td>0.558727</td>\n",
254
  " <td>0.055164</td>\n",
@@ -256,7 +240,7 @@
256
  " <td>0.055164</td>\n",
257
  " </tr>\n",
258
  " <tr>\n",
259
- " <th>3</th>\n",
260
  " <td>mosaicml/mpt-7b-instruct</td>\n",
261
  " <td>0.537091</td>\n",
262
  " <td>0.041919</td>\n",
@@ -264,7 +248,7 @@
264
  " <td>0.041919</td>\n",
265
  " </tr>\n",
266
  " <tr>\n",
267
- " <th>4</th>\n",
268
  " <td>mosaicml/mpt-7b</td>\n",
269
  " <td>0.536000</td>\n",
270
  " <td>0.042339</td>\n",
@@ -272,7 +256,7 @@
272
  " <td>0.042339</td>\n",
273
  " </tr>\n",
274
  " <tr>\n",
275
- " <th>5</th>\n",
276
  " <td>mosaicml/mpt-7b-chat</td>\n",
277
  " <td>0.538000</td>\n",
278
  " <td>0.047059</td>\n",
@@ -280,7 +264,15 @@
280
  " <td>0.047059</td>\n",
281
  " </tr>\n",
282
  " <tr>\n",
283
- " <th>6</th>\n",
284
  " <td>bigscience/bloomz-7b1-mt</td>\n",
285
  " <td>0.546000</td>\n",
286
  " <td>0.038321</td>\n",
@@ -288,7 +280,7 @@
288
  " <td>0.038321</td>\n",
289
  " </tr>\n",
290
  " <tr>\n",
291
- " <th>7</th>\n",
292
  " <td>bigscience/bloomz-7b1</td>\n",
293
  " <td>0.547818</td>\n",
294
  " <td>0.038920</td>\n",
@@ -296,7 +288,7 @@
296
  " <td>0.038920</td>\n",
297
  " </tr>\n",
298
  " <tr>\n",
299
- " <th>8</th>\n",
300
  " <td>EleutherAI/pythia-2.8b</td>\n",
301
  " <td>0.537455</td>\n",
302
  " <td>0.026941</td>\n",
@@ -304,7 +296,7 @@
304
  " <td>0.026941</td>\n",
305
  " </tr>\n",
306
  " <tr>\n",
307
- " <th>9</th>\n",
308
  " <td>EleutherAI/pythia-1.4b</td>\n",
309
  " <td>0.526545</td>\n",
310
  " <td>0.027441</td>\n",
@@ -312,7 +304,7 @@
312
  " <td>0.027441</td>\n",
313
  " </tr>\n",
314
  " <tr>\n",
315
- " <th>10</th>\n",
316
  " <td>EleutherAI/gpt-j-6b</td>\n",
317
  " <td>0.544182</td>\n",
318
  " <td>0.034404</td>\n",
@@ -320,7 +312,7 @@
320
  " <td>0.034404</td>\n",
321
  " </tr>\n",
322
  " <tr>\n",
323
- " <th>11</th>\n",
324
  " <td>EleutherAI/pythia-6.9b</td>\n",
325
  " <td>0.540545</td>\n",
326
  " <td>0.029689</td>\n",
@@ -328,7 +320,23 @@
328
  " <td>0.029689</td>\n",
329
  " </tr>\n",
330
  " <tr>\n",
331
  " <th>12</th>\n",
332
  " <td>microsoft/phi-1_5</td>\n",
333
  " <td>0.521636</td>\n",
334
  " <td>0.026198</td>\n",
@@ -336,7 +344,7 @@
336
  " <td>0.026198</td>\n",
337
  " </tr>\n",
338
  " <tr>\n",
339
- " <th>13</th>\n",
340
  " <td>microsoft/phi-2</td>\n",
341
  " <td>0.512182</td>\n",
342
  " <td>0.029742</td>\n",
@@ -344,7 +352,7 @@
344
  " <td>0.029742</td>\n",
345
  " </tr>\n",
346
  " <tr>\n",
347
- " <th>14</th>\n",
348
  " <td>microsoft/phi-1</td>\n",
349
  " <td>0.517636</td>\n",
350
  " <td>0.029612</td>\n",
@@ -352,7 +360,7 @@
352
  " <td>0.029612</td>\n",
353
  " </tr>\n",
354
  " <tr>\n",
355
- " <th>15</th>\n",
356
  " <td>allenai/OLMo-7B</td>\n",
357
  " <td>0.537818</td>\n",
358
  " <td>0.034147</td>\n",
@@ -360,7 +368,7 @@
360
  " <td>0.034147</td>\n",
361
  " </tr>\n",
362
  " <tr>\n",
363
- " <th>16</th>\n",
364
  " <td>TinyLlama/TinyLlama-1.1B-intermediate-step-143...</td>\n",
365
  " <td>0.529273</td>\n",
366
  " <td>0.029316</td>\n",
@@ -368,7 +376,7 @@
368
  " <td>0.029316</td>\n",
369
  " </tr>\n",
370
  " <tr>\n",
371
- " <th>17</th>\n",
372
  " <td>TinyLlama/TinyLlama-1.1B-Chat-v1.0</td>\n",
373
  " <td>0.528909</td>\n",
374
  " <td>0.031702</td>\n",
@@ -376,7 +384,7 @@
376
  " <td>0.031702</td>\n",
377
  " </tr>\n",
378
  " <tr>\n",
379
- " <th>18</th>\n",
380
  " <td>RWKV/rwkv-5-world-1b5</td>\n",
381
  " <td>0.578909</td>\n",
382
  " <td>0.045103</td>\n",
@@ -384,7 +392,7 @@
384
  " <td>0.045103</td>\n",
385
  " </tr>\n",
386
  " <tr>\n",
387
- " <th>19</th>\n",
388
  " <td>RWKV/rwkv-5-world-3b</td>\n",
389
  " <td>0.590182</td>\n",
390
  " <td>0.056241</td>\n",
@@ -392,7 +400,7 @@
392
  " <td>0.056241</td>\n",
393
  " </tr>\n",
394
  " <tr>\n",
395
- " <th>20</th>\n",
396
  " <td>RWKV/rwkv-4-world-3b</td>\n",
397
  " <td>0.575455</td>\n",
398
  " <td>0.040977</td>\n",
@@ -400,7 +408,7 @@
400
  " <td>0.040977</td>\n",
401
  " </tr>\n",
402
  " <tr>\n",
403
- " <th>21</th>\n",
404
  " <td>RWKV/rwkv-4-world-1b5</td>\n",
405
  " <td>0.554000</td>\n",
406
  " <td>0.039406</td>\n",
@@ -408,7 +416,7 @@
408
  " <td>0.039406</td>\n",
409
  " </tr>\n",
410
  " <tr>\n",
411
- " <th>22</th>\n",
412
  " <td>RWKV/rwkv-4-world-7b</td>\n",
413
  " <td>0.601455</td>\n",
414
  " <td>0.053116</td>\n",
@@ -416,7 +424,7 @@
416
  " <td>0.053116</td>\n",
417
  " </tr>\n",
418
  " <tr>\n",
419
- " <th>23</th>\n",
420
  " <td>RWKV/HF_v5-Eagle-7B</td>\n",
421
  " <td>0.621818</td>\n",
422
  " <td>0.068986</td>\n",
@@ -424,7 +432,15 @@
424
  " <td>0.068986</td>\n",
425
  " </tr>\n",
426
  " <tr>\n",
427
- " <th>24</th>\n",
428
  " <td>togethercomputer/RedPajama-INCITE-7B-Instruct</td>\n",
429
  " <td>0.528545</td>\n",
430
  " <td>0.036470</td>\n",
@@ -432,7 +448,7 @@
432
  " <td>0.036470</td>\n",
433
  " </tr>\n",
434
  " <tr>\n",
435
- " <th>25</th>\n",
436
  " <td>togethercomputer/RedPajama-INCITE-7B-Chat</td>\n",
437
  " <td>0.535455</td>\n",
438
  " <td>0.038723</td>\n",
@@ -440,7 +456,7 @@
440
  " <td>0.038723</td>\n",
441
  " </tr>\n",
442
  " <tr>\n",
443
- " <th>26</th>\n",
444
  " <td>facebook/opt-2.7b</td>\n",
445
  " <td>0.521818</td>\n",
446
  " <td>0.029821</td>\n",
@@ -448,7 +464,7 @@
448
  " <td>0.029821</td>\n",
449
  " </tr>\n",
450
  " <tr>\n",
451
- " <th>27</th>\n",
452
  " <td>facebook/opt-6.7b</td>\n",
453
  " <td>0.522909</td>\n",
454
  " <td>0.027216</td>\n",
@@ -456,7 +472,7 @@
456
  " <td>0.027216</td>\n",
457
  " </tr>\n",
458
  " <tr>\n",
459
- " <th>28</th>\n",
460
  " <td>facebook/opt-1.3b</td>\n",
461
  " <td>0.521818</td>\n",
462
  " <td>0.029112</td>\n",
@@ -464,7 +480,7 @@
464
  " <td>0.029112</td>\n",
465
  " </tr>\n",
466
  " <tr>\n",
467
- " <th>29</th>\n",
468
  " <td>tiiuae/falcon-7b-instruct</td>\n",
469
  " <td>0.536727</td>\n",
470
  " <td>0.053430</td>\n",
@@ -472,7 +488,7 @@
472
  " <td>0.053430</td>\n",
473
  " </tr>\n",
474
  " <tr>\n",
475
- " <th>30</th>\n",
476
  " <td>tiiuae/falcon-rw-1b</td>\n",
477
  " <td>0.522545</td>\n",
478
  " <td>0.029446</td>\n",
@@ -480,7 +496,7 @@
480
  " <td>0.029446</td>\n",
481
  " </tr>\n",
482
  " <tr>\n",
483
- " <th>31</th>\n",
484
  " <td>tiiuae/falcon-rw-7b</td>\n",
485
  " <td>0.535818</td>\n",
486
  " <td>0.033185</td>\n",
@@ -488,7 +504,7 @@
488
  " <td>0.033185</td>\n",
489
  " </tr>\n",
490
  " <tr>\n",
491
- " <th>32</th>\n",
492
  " <td>tiiuae/falcon-7b</td>\n",
493
  " <td>0.559636</td>\n",
494
  " <td>0.071650</td>\n",
@@ -496,7 +512,7 @@
496
  " <td>0.071650</td>\n",
497
  " </tr>\n",
498
  " <tr>\n",
499
- " <th>33</th>\n",
500
  " <td>huggyllama/llama-7b</td>\n",
501
  " <td>0.541818</td>\n",
502
  " <td>0.040718</td>\n",
@@ -504,7 +520,7 @@
504
  " <td>0.040718</td>\n",
505
  " </tr>\n",
506
  " <tr>\n",
507
- " <th>34</th>\n",
508
  " <td>meta-llama/Llama-2-7b-chat-hf</td>\n",
509
  " <td>0.000000</td>\n",
510
  " <td>0.000000</td>\n",
@@ -512,7 +528,7 @@
512
  " <td>NaN</td>\n",
513
  " </tr>\n",
514
  " <tr>\n",
515
- " <th>35</th>\n",
516
  " <td>meta-llama/Llama-2-7b-hf</td>\n",
517
  " <td>0.566727</td>\n",
518
  " <td>0.052515</td>\n",
@@ -525,83 +541,87 @@
525
  ],
526
  "text/plain": [
527
  " model avg_acc \\\n",
528
- "0 bigscience/bloom-7b1 0.570909 \n",
529
- "1 togethercomputer/RedPajama-INCITE-7B-Base 0.525455 \n",
530
- "2 mistralai/Mistral-7B-v0.1 0.558727 \n",
531
- "3 mosaicml/mpt-7b-instruct 0.537091 \n",
532
- "4 mosaicml/mpt-7b 0.536000 \n",
533
- "5 mosaicml/mpt-7b-chat 0.538000 \n",
534
- "6 bigscience/bloomz-7b1-mt 0.546000 \n",
535
- "7 bigscience/bloomz-7b1 0.547818 \n",
536
- "8 EleutherAI/pythia-2.8b 0.537455 \n",
537
- "9 EleutherAI/pythia-1.4b 0.526545 \n",
538
- "10 EleutherAI/gpt-j-6b 0.544182 \n",
539
- "11 EleutherAI/pythia-6.9b 0.540545 \n",
540
- "12 microsoft/phi-1_5 0.521636 \n",
541
- "13 microsoft/phi-2 0.512182 \n",
542
- "14 microsoft/phi-1 0.517636 \n",
543
- "15 allenai/OLMo-7B 0.537818 \n",
544
- "16 TinyLlama/TinyLlama-1.1B-intermediate-step-143... 0.529273 \n",
545
- "17 TinyLlama/TinyLlama-1.1B-Chat-v1.0 0.528909 \n",
546
- "18 RWKV/rwkv-5-world-1b5 0.578909 \n",
547
- "19 RWKV/rwkv-5-world-3b 0.590182 \n",
548
- "20 RWKV/rwkv-4-world-3b 0.575455 \n",
549
- "21 RWKV/rwkv-4-world-1b5 0.554000 \n",
550
- "22 RWKV/rwkv-4-world-7b 0.601455 \n",
551
- "23 RWKV/HF_v5-Eagle-7B 0.621818 \n",
552
- "24 togethercomputer/RedPajama-INCITE-7B-Instruct 0.528545 \n",
553
- "25 togethercomputer/RedPajama-INCITE-7B-Chat 0.535455 \n",
554
- "26 facebook/opt-2.7b 0.521818 \n",
555
- "27 facebook/opt-6.7b 0.522909 \n",
556
- "28 facebook/opt-1.3b 0.521818 \n",
557
- "29 tiiuae/falcon-7b-instruct 0.536727 \n",
558
- "30 tiiuae/falcon-rw-1b 0.522545 \n",
559
- "31 tiiuae/falcon-rw-7b 0.535818 \n",
560
- "32 tiiuae/falcon-7b 0.559636 \n",
561
- "33 huggyllama/llama-7b 0.541818 \n",
562
- "34 meta-llama/Llama-2-7b-chat-hf 0.000000 \n",
563
- "35 meta-llama/Llama-2-7b-hf 0.566727 \n",
564
  "\n",
565
  " avg_acc_stderr xcopa (acc) xcopa (acc_stderr) \n",
566
- "0 0.061359 0.570909 0.061359 \n",
567
- "1 0.036407 0.525455 0.036407 \n",
568
- "2 0.055164 0.558727 0.055164 \n",
569
- "3 0.041919 0.537091 0.041919 \n",
570
- "4 0.042339 0.536000 0.042339 \n",
571
- "5 0.047059 0.538000 0.047059 \n",
572
- "6 0.038321 0.546000 0.038321 \n",
573
- "7 0.038920 0.547818 0.038920 \n",
574
- "8 0.026941 0.537455 0.026941 \n",
575
- "9 0.027441 0.526545 0.027441 \n",
576
- "10 0.034404 0.544182 0.034404 \n",
577
- "11 0.029689 0.540545 0.029689 \n",
578
- "12 0.026198 0.521636 0.026198 \n",
579
- "13 0.029742 0.512182 0.029742 \n",
580
- "14 0.029612 0.517636 0.029612 \n",
581
- "15 0.034147 0.537818 0.034147 \n",
582
- "16 0.029316 0.529273 0.029316 \n",
583
- "17 0.031702 0.528909 0.031702 \n",
584
- "18 0.045103 0.578909 0.045103 \n",
585
- "19 0.056241 0.590182 0.056241 \n",
586
- "20 0.040977 0.575455 0.040977 \n",
587
- "21 0.039406 0.554000 0.039406 \n",
588
- "22 0.053116 0.601455 0.053116 \n",
589
- "23 0.068986 0.621818 0.068986 \n",
590
- "24 0.036470 0.528545 0.036470 \n",
591
- "25 0.038723 0.535455 0.038723 \n",
592
- "26 0.029821 0.521818 0.029821 \n",
593
- "27 0.027216 0.522909 0.027216 \n",
594
- "28 0.029112 0.521818 0.029112 \n",
595
- "29 0.053430 0.536727 0.053430 \n",
596
- "30 0.029446 0.522545 0.029446 \n",
597
- "31 0.033185 0.535818 0.033185 \n",
598
- "32 0.071650 0.559636 0.071650 \n",
599
- "33 0.040718 0.541818 0.040718 \n",
600
- "34 0.000000 NaN NaN \n",
601
- "35 0.052515 0.566727 0.052515 "
602
  ]
603
  },
604
- "execution_count": 56,
605
  "metadata": {},
606
  "output_type": "execute_result"
607
  }
@@ -783,23 +803,23 @@
783
  },
784
  {
785
  "cell_type": "code",
786
- "execution_count": 60,
787
  "metadata": {},
788
  "outputs": [
789
  {
790
  "name": "stdout",
791
  "output_type": "stream",
792
  "text": [
793
- "total 13600\n",
794
- "-rw-r--r--@ 1 picocreator staff 822K Feb 22 13:47 bf16-all-results-and-groups.csv\n",
795
- "-rw-r--r--@ 1 picocreator staff 750K Feb 22 13:47 bf16-eng-results.csv\n",
796
- "-rw-r--r--@ 1 picocreator staff 63K Feb 22 13:47 bf16-eng-summary.csv\n",
797
- "-rw-r--r--@ 1 picocreator staff 83K Feb 22 13:47 bf16-multilang-results.csv\n",
798
- "-rw-r--r--@ 1 picocreator staff 12K Feb 22 13:47 bf16-multilang-summary.csv\n",
799
- "-rw-r--r--@ 1 picocreator staff 750K Feb 22 13:47 bf16-sorted-eng-results.csv\n",
800
- "-rw-r--r-- 1 picocreator staff 63K Feb 22 13:47 bf16-sorted-eng-summary.csv\n",
801
- "-rw-r--r--@ 1 picocreator staff 12K Feb 22 13:47 bf16-sorted-multilang-summary.csv\n",
802
- "-rw-r--r-- 1 picocreator staff 3.1M Feb 22 13:29 compiled-lm-eval-results.json\n"
803
  ]
804
  }
805
  ],
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 67,
6
  "metadata": {},
7
  "outputs": [
8
  {
 
11
  "text": [
12
  "Defaulting to user installation because normal site-packages is not writeable\n",
13
  "Requirement already satisfied: pandas in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (2.2.0)\n",
14
  "Requirement already satisfied: numpy<2,>=1.22.4 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (1.26.1)\n",
15
+ "Requirement already satisfied: pytz>=2020.1 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n",
16
+ "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2.8.2)\n",
17
+ "Requirement already satisfied: tzdata>=2022.7 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n",
18
  "Requirement already satisfied: six>=1.5 in /Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/site-packages (from python-dateutil>=2.8.2->pandas) (1.15.0)\n",
19
  "\u001b[33mWARNING: You are using pip version 21.2.4; however, version 24.0 is available.\n",
20
  "You should consider upgrading via the '/Library/Developer/CommandLineTools/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\n"
 
36
  },
37
  {
38
  "cell_type": "code",
39
+ "execution_count": 68,
40
  "metadata": {},
41
  "outputs": [
42
  {
43
  "name": "stdout",
44
  "output_type": "stream",
45
  "text": [
46
+ "Found 2489 results.json files\n"
47
  ]
48
  }
49
  ],
 
71
  },
72
  {
73
  "cell_type": "code",
74
+ "execution_count": 69,
75
  "metadata": {},
76
  "outputs": [
77
  {
78
  "name": "stdout",
79
  "output_type": "stream",
80
  "text": [
81
+ "Processed example: {'name': 'mistralai/Mistral-7B-v0.1', 'config': {'dtype=bfloat16,trust_remote_code=True': {'confStr': 'dtype=bfloat16,trust_remote_code=True', 'confObj': {'dtype': 'bfloat16', 'trust_remote_code': 'True'}, 'results': {'xcopa': {'acc,none': 0.5587272727272727, 'acc_stderr,none': 0.0551636604460852, 'alias': 'xcopa'}, 'xcopa_et': {'acc,none': 0.466, 'acc_stderr,none': 0.02233126442325838, 'alias': ' - xcopa_et'}, 'xcopa_ht': {'acc,none': 0.512, 'acc_stderr,none': 0.02237662679792717, 'alias': ' - xcopa_ht'}, 'xcopa_id': {'acc,none': 0.582, 'acc_stderr,none': 0.022080014812228137, 'alias': ' - xcopa_id'}, 'xcopa_it': {'acc,none': 0.66, 'acc_stderr,none': 0.021206117013673066, 'alias': ' - xcopa_it'}, 'xcopa_qu': {'acc,none': 0.482, 'acc_stderr,none': 0.02236856511738799, 'alias': ' - xcopa_qu'}, 'xcopa_sw': {'acc,none': 0.518, 'acc_stderr,none': 0.02236856511738799, 'alias': ' - xcopa_sw'}, 'xcopa_ta': {'acc,none': 0.542, 'acc_stderr,none': 0.02230396677426995, 'alias': ' - xcopa_ta'}, 'xcopa_th': {'acc,none': 0.564, 'acc_stderr,none': 0.0221989546414768, 'alias': ' - xcopa_th'}, 'xcopa_tr': {'acc,none': 0.568, 'acc_stderr,none': 0.02217510926561316, 'alias': ' - xcopa_tr'}, 'xcopa_vi': {'acc,none': 0.59, 'acc_stderr,none': 0.022017482578127672, 'alias': ' - xcopa_vi'}, 'xcopa_zh': {'acc,none': 0.662, 'acc_stderr,none': 0.021175665695209407, 'alias': ' - xcopa_zh'}, 'xnli': {'acc,none': 0.43175368139223563, 'acc_stderr,none': 0.0565098070106032, 'alias': 'xnli'}, 'xnli_ar': {'acc,none': 0.334136546184739, 'acc_stderr,none': 0.009454577602463621, 'alias': ' - xnli_ar'}, 'xnli_bg': {'acc,none': 0.4534136546184739, 'acc_stderr,none': 0.009978476483838962, 'alias': ' - xnli_bg'}, 'xnli_de': {'acc,none': 0.5012048192771085, 'acc_stderr,none': 0.01002204377131557, 'alias': ' - xnli_de'}, 'xnli_el': {'acc,none': 0.41365461847389556, 'acc_stderr,none': 0.009871502159099366, 'alias': ' - xnli_el'}, 'xnli_en': {'acc,none': 0.5690763052208835, 'acc_stderr,none': 0.009925970741520641, 'alias': ' - xnli_en'}, 'xnli_es': {'acc,none': 0.4562248995983936, 'acc_stderr,none': 0.009983589197693925, 'alias': ' - xnli_es'}, 'xnli_fr': {'acc,none': 0.5100401606425703, 'acc_stderr,none': 0.010020052116889137, 'alias': ' - xnli_fr'}, 'xnli_hi': {'acc,none': 0.42650602409638555, 'acc_stderr,none': 0.009913215943570534, 'alias': ' - xnli_hi'}, 'xnli_ru': {'acc,none': 0.4967871485943775, 'acc_stderr,none': 0.010021865961119557, 'alias': ' - xnli_ru'}, 'xnli_sw': {'acc,none': 0.363855421686747, 'acc_stderr,none': 0.009643393577626719, 'alias': ' - xnli_sw'}, 'xnli_th': {'acc,none': 0.38835341365461845, 'acc_stderr,none': 0.009769028875673285, 'alias': ' - xnli_th'}, 'xnli_tr': {'acc,none': 0.43654618473895584, 'acc_stderr,none': 0.009941039791133128, 'alias': ' - xnli_tr'}, 'xnli_ur': {'acc,none': 0.3381526104417671, 'acc_stderr,none': 0.009482500057981031, 'alias': ' - xnli_ur'}, 'xnli_vi': {'acc,none': 0.41244979919678715, 'acc_stderr,none': 0.009867237678555586, 'alias': ' - xnli_vi'}, 'xnli_zh': {'acc,none': 0.3759036144578313, 'acc_stderr,none': 0.00970848885066604, 'alias': ' - xnli_zh'}, 'pawsx': {'acc,none': 0.41585714285714287, 'acc_stderr,none': 0.05538778178867068, 'alias': 'pawsx'}, 'paws_de': {'acc,none': 0.385, 'acc_stderr,none': 0.010883323176386978, 'alias': ' - paws_de'}, 'paws_en': {'acc,none': 0.3125, 'acc_stderr,none': 0.010367044555050548, 'alias': ' - paws_en'}, 'paws_es': {'acc,none': 0.356, 'acc_stderr,none': 0.010709311120344539, 'alias': ' - paws_es'}, 'paws_fr': 
{'acc,none': 0.4885, 'acc_stderr,none': 0.011180177690296085, 'alias': ' - paws_fr'}, 'paws_ja': {'acc,none': 0.534, 'acc_stderr,none': 0.011157250652425779, 'alias': ' - paws_ja'}, 'paws_ko': {'acc,none': 0.4175, 'acc_stderr,none': 0.011029855114729358, 'alias': ' - paws_ko'}, 'paws_zh': {'acc,none': 0.4175, 'acc_stderr,none': 0.011029855114729354, 'alias': ' - paws_zh'}, 'lambada_multilingual': {'perplexity,none': 27.047409162154935, 'perplexity_stderr,none': 8.199911438395738, 'acc,none': 0.5190374539103435, 'acc_stderr,none': 0.07089117907004505, 'alias': 'lambada_multilingual'}, 'lambada_openai_mt_de': {'perplexity,none': 43.294453054791916, 'perplexity_stderr,none': 2.4066806886162686, 'acc,none': 0.39996118765767513, 'acc_stderr,none': 0.006825125929166165, 'alias': ' - lambada_openai_mt_de'}, 'lambada_openai_mt_en': {'perplexity,none': 3.1814104914677763, 'perplexity_stderr,none': 0.05822157255540461, 'acc,none': 0.7554822433533864, 'acc_stderr,none': 0.005987967089937308, 'alias': ' - lambada_openai_mt_en'}, 'lambada_openai_mt_es': {'perplexity,none': 36.26423960927208, 'perplexity_stderr,none': 1.790606090078102, 'acc,none': 0.42790607413157383, 'acc_stderr,none': 0.00689318551693077, 'alias': ' - lambada_openai_mt_es'}, 'lambada_openai_mt_fr': {'perplexity,none': 22.218390608610928, 'perplexity_stderr,none': 1.1061897900321798, 'acc,none': 0.5214438191344848, 'acc_stderr,none': 0.006959568274744848, 'alias': ' - lambada_openai_mt_fr'}, 'lambada_openai_mt_it': {'perplexity,none': 30.278552046631987, 'perplexity_stderr,none': 1.6707259318257452, 'acc,none': 0.49039394527459734, 'acc_stderr,none': 0.006964691949428186, 'alias': ' - lambada_openai_mt_it'}, 'xwinograd': {'acc,none': 0.8141155315801304, 'acc_stderr,none': 0.047153752482205775, 'alias': 'xwinograd'}, 'xwinograd_en': {'acc,none': 0.8868817204301075, 'acc_stderr,none': 0.0065702392696682255, 'alias': ' - xwinograd_en'}, 'xwinograd_fr': {'acc,none': 0.7469879518072289, 'acc_stderr,none': 0.048008758304372776, 'alias': ' - xwinograd_fr'}, 'xwinograd_jp': {'acc,none': 0.721584984358707, 'acc_stderr,none': 0.014481292182837467, 'alias': ' - xwinograd_jp'}, 'xwinograd_pt': {'acc,none': 0.7642585551330798, 'acc_stderr,none': 0.026223308206222536, 'alias': ' - xwinograd_pt'}, 'xwinograd_ru': {'acc,none': 0.6888888888888889, 'acc_stderr,none': 0.02612567541895451, 'alias': ' - xwinograd_ru'}, 'xwinograd_zh': {'acc,none': 0.7698412698412699, 'acc_stderr,none': 0.018768533005904867, 'alias': ' - xwinograd_zh'}, 'xstorycloze': {'acc,none': 0.5916611515552614, 'acc_stderr,none': 0.07711658992261772, 'alias': 'xstorycloze'}, 'xstorycloze_ar': {'acc,none': 0.5294506949040371, 'acc_stderr,none': 0.012844785490016997, 'alias': ' - xstorycloze_ar'}, 'xstorycloze_en': {'acc,none': 0.786896095301125, 'acc_stderr,none': 0.010538187590034574, 'alias': ' - xstorycloze_en'}, 'xstorycloze_es': {'acc,none': 0.6909331568497684, 'acc_stderr,none': 0.011892023305070085, 'alias': ' - xstorycloze_es'}, 'xstorycloze_eu': {'acc,none': 0.5109199205823958, 'acc_stderr,none': 0.012864056278255043, 'alias': ' - xstorycloze_eu'}, 'xstorycloze_hi': {'acc,none': 0.5539377895433488, 'acc_stderr,none': 0.012792037953589649, 'alias': ' - xstorycloze_hi'}, 'xstorycloze_id': {'acc,none': 0.5936465916611515, 'acc_stderr,none': 0.012639429420389871, 'alias': ' - xstorycloze_id'}, 'xstorycloze_my': {'acc,none': 0.4884182660489742, 'acc_stderr,none': 0.012863672949335892, 'alias': ' - xstorycloze_my'}, 'xstorycloze_ru': {'acc,none': 0.6651224354731966, 
'acc_stderr,none': 0.012145219027833156, 'alias': ' - xstorycloze_ru'}, 'xstorycloze_sw': {'acc,none': 0.5129053606882858, 'acc_stderr,none': 0.012862838605728476, 'alias': ' - xstorycloze_sw'}, 'xstorycloze_te': {'acc,none': 0.5413633355393779, 'acc_stderr,none': 0.012823020340169815, 'alias': ' - xstorycloze_te'}, 'xstorycloze_zh': {'acc,none': 0.6346790205162144, 'acc_stderr,none': 0.012391557728373984, 'alias': ' - xstorycloze_zh'}}, 'groups': {'xcopa': {'acc,none': 0.5587272727272727, 'acc_stderr,none': 0.0551636604460852, 'alias': 'xcopa'}, 'xnli': {'acc,none': 0.43175368139223563, 'acc_stderr,none': 0.0565098070106032, 'alias': 'xnli'}, 'pawsx': {'acc,none': 0.41585714285714287, 'acc_stderr,none': 0.05538778178867068, 'alias': 'pawsx'}, 'lambada_multilingual': {'perplexity,none': 27.047409162154935, 'perplexity_stderr,none': 8.199911438395738, 'acc,none': 0.5190374539103435, 'acc_stderr,none': 0.07089117907004505, 'alias': 'lambada_multilingual'}, 'xwinograd': {'acc,none': 0.8141155315801304, 'acc_stderr,none': 0.047153752482205775, 'alias': 'xwinograd'}, 'xstorycloze': {'acc,none': 0.5916611515552614, 'acc_stderr,none': 0.07711658992261772, 'alias': 'xstorycloze'}}}}}\n"
82
  ]
83
  }
84
  ],
 
156
  },
157
  {
158
  "cell_type": "code",
159
+ "execution_count": 70,
160
  "metadata": {},
161
  "outputs": [
162
  {
163
  "name": "stdout",
164
  "output_type": "stream",
165
  "text": [
166
+ "Found 38 models\n",
167
  "Models: \n",
168
+ "['mistralai/Mistral-7B-v0.1', 'mosaicml/mpt-7b-instruct', 'mosaicml/mpt-7b', 'mosaicml/mpt-7b-chat', 'bigscience/bloom-7b1', 'bigscience/bloomz-7b1-mt', 'bigscience/bloomz-7b1', 'EleutherAI/pythia-2.8b', 'EleutherAI/pythia-1.4b', 'EleutherAI/gpt-j-6b', 'EleutherAI/pythia-6.9b', 'google/gemma-2b', 'google/gemma-7b', 'microsoft/phi-1_5', 'microsoft/phi-2', 'microsoft/phi-1', 'allenai/OLMo-7B', 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'TinyLlama/TinyLlama-1.1B-Chat-v1.0', 'RWKV/rwkv-5-world-1b5', 'RWKV/rwkv-5-world-3b', 'RWKV/rwkv-4-world-3b', 'RWKV/rwkv-4-world-1b5', 'RWKV/rwkv-4-world-7b', 'RWKV/HF_v5-Eagle-7B', 'togethercomputer/RedPajama-INCITE-7B-Base', 'togethercomputer/RedPajama-INCITE-7B-Instruct', 'togethercomputer/RedPajama-INCITE-7B-Chat', 'facebook/opt-2.7b', 'facebook/opt-6.7b', 'facebook/opt-1.3b', 'tiiuae/falcon-7b-instruct', 'tiiuae/falcon-rw-1b', 'tiiuae/falcon-rw-7b', 'tiiuae/falcon-7b', 'huggyllama/llama-7b', 'meta-llama/Llama-2-7b-chat-hf', 'meta-llama/Llama-2-7b-hf']\n",
169
  "Saved to compiled-lm-eval-results.json\n"
170
  ]
171
  }
 
199
  },
200
  {
201
  "cell_type": "code",
202
+ "execution_count": 71,
203
  "metadata": {},
204
  "outputs": [
205
  {
 
233
  " <tbody>\n",
234
  " <tr>\n",
235
  " <th>0</th>\n",
236
  " <td>mistralai/Mistral-7B-v0.1</td>\n",
237
  " <td>0.558727</td>\n",
238
  " <td>0.055164</td>\n",
 
240
  " <td>0.055164</td>\n",
241
  " </tr>\n",
242
  " <tr>\n",
243
+ " <th>1</th>\n",
244
  " <td>mosaicml/mpt-7b-instruct</td>\n",
245
  " <td>0.537091</td>\n",
246
  " <td>0.041919</td>\n",
 
248
  " <td>0.041919</td>\n",
249
  " </tr>\n",
250
  " <tr>\n",
251
+ " <th>2</th>\n",
252
  " <td>mosaicml/mpt-7b</td>\n",
253
  " <td>0.536000</td>\n",
254
  " <td>0.042339</td>\n",
 
256
  " <td>0.042339</td>\n",
257
  " </tr>\n",
258
  " <tr>\n",
259
+ " <th>3</th>\n",
260
  " <td>mosaicml/mpt-7b-chat</td>\n",
261
  " <td>0.538000</td>\n",
262
  " <td>0.047059</td>\n",
 
264
  " <td>0.047059</td>\n",
265
  " </tr>\n",
266
  " <tr>\n",
267
+ " <th>4</th>\n",
268
+ " <td>bigscience/bloom-7b1</td>\n",
269
+ " <td>0.570909</td>\n",
270
+ " <td>0.061359</td>\n",
271
+ " <td>0.570909</td>\n",
272
+ " <td>0.061359</td>\n",
273
+ " </tr>\n",
274
+ " <tr>\n",
275
+ " <th>5</th>\n",
276
  " <td>bigscience/bloomz-7b1-mt</td>\n",
277
  " <td>0.546000</td>\n",
278
  " <td>0.038321</td>\n",
 
280
  " <td>0.038321</td>\n",
281
  " </tr>\n",
282
  " <tr>\n",
283
+ " <th>6</th>\n",
284
  " <td>bigscience/bloomz-7b1</td>\n",
285
  " <td>0.547818</td>\n",
286
  " <td>0.038920</td>\n",
 
288
  " <td>0.038920</td>\n",
289
  " </tr>\n",
290
  " <tr>\n",
291
+ " <th>7</th>\n",
292
  " <td>EleutherAI/pythia-2.8b</td>\n",
293
  " <td>0.537455</td>\n",
294
  " <td>0.026941</td>\n",
 
296
  " <td>0.026941</td>\n",
297
  " </tr>\n",
298
  " <tr>\n",
299
+ " <th>8</th>\n",
300
  " <td>EleutherAI/pythia-1.4b</td>\n",
301
  " <td>0.526545</td>\n",
302
  " <td>0.027441</td>\n",
 
304
  " <td>0.027441</td>\n",
305
  " </tr>\n",
306
  " <tr>\n",
307
+ " <th>9</th>\n",
308
  " <td>EleutherAI/gpt-j-6b</td>\n",
309
  " <td>0.544182</td>\n",
310
  " <td>0.034404</td>\n",
 
312
  " <td>0.034404</td>\n",
313
  " </tr>\n",
314
  " <tr>\n",
315
+ " <th>10</th>\n",
316
  " <td>EleutherAI/pythia-6.9b</td>\n",
317
  " <td>0.540545</td>\n",
318
  " <td>0.029689</td>\n",
 
320
  " <td>0.029689</td>\n",
321
  " </tr>\n",
322
  " <tr>\n",
323
+ " <th>11</th>\n",
324
+ " <td>google/gemma-2b</td>\n",
325
+ " <td>0.520545</td>\n",
326
+ " <td>0.025332</td>\n",
327
+ " <td>0.520545</td>\n",
328
+ " <td>0.025332</td>\n",
329
+ " </tr>\n",
330
+ " <tr>\n",
331
  " <th>12</th>\n",
332
+ " <td>google/gemma-7b</td>\n",
333
+ " <td>0.517636</td>\n",
334
+ " <td>0.027330</td>\n",
335
+ " <td>0.517636</td>\n",
336
+ " <td>0.027330</td>\n",
337
+ " </tr>\n",
338
+ " <tr>\n",
339
+ " <th>13</th>\n",
340
  " <td>microsoft/phi-1_5</td>\n",
341
  " <td>0.521636</td>\n",
342
  " <td>0.026198</td>\n",
 
344
  " <td>0.026198</td>\n",
345
  " </tr>\n",
346
  " <tr>\n",
347
+ " <th>14</th>\n",
348
  " <td>microsoft/phi-2</td>\n",
349
  " <td>0.512182</td>\n",
350
  " <td>0.029742</td>\n",
 
352
  " <td>0.029742</td>\n",
353
  " </tr>\n",
354
  " <tr>\n",
355
+ " <th>15</th>\n",
356
  " <td>microsoft/phi-1</td>\n",
357
  " <td>0.517636</td>\n",
358
  " <td>0.029612</td>\n",
 
360
  " <td>0.029612</td>\n",
361
  " </tr>\n",
362
  " <tr>\n",
363
+ " <th>16</th>\n",
364
  " <td>allenai/OLMo-7B</td>\n",
365
  " <td>0.537818</td>\n",
366
  " <td>0.034147</td>\n",
 
368
  " <td>0.034147</td>\n",
369
  " </tr>\n",
370
  " <tr>\n",
371
+ " <th>17</th>\n",
372
  " <td>TinyLlama/TinyLlama-1.1B-intermediate-step-143...</td>\n",
373
  " <td>0.529273</td>\n",
374
  " <td>0.029316</td>\n",
 
376
  " <td>0.029316</td>\n",
377
  " </tr>\n",
378
  " <tr>\n",
379
+ " <th>18</th>\n",
380
  " <td>TinyLlama/TinyLlama-1.1B-Chat-v1.0</td>\n",
381
  " <td>0.528909</td>\n",
382
  " <td>0.031702</td>\n",
 
384
  " <td>0.031702</td>\n",
385
  " </tr>\n",
386
  " <tr>\n",
387
+ " <th>19</th>\n",
388
  " <td>RWKV/rwkv-5-world-1b5</td>\n",
389
  " <td>0.578909</td>\n",
390
  " <td>0.045103</td>\n",
 
392
  " <td>0.045103</td>\n",
393
  " </tr>\n",
394
  " <tr>\n",
395
+ " <th>20</th>\n",
396
  " <td>RWKV/rwkv-5-world-3b</td>\n",
397
  " <td>0.590182</td>\n",
398
  " <td>0.056241</td>\n",
 
400
  " <td>0.056241</td>\n",
401
  " </tr>\n",
402
  " <tr>\n",
403
+ " <th>21</th>\n",
404
  " <td>RWKV/rwkv-4-world-3b</td>\n",
405
  " <td>0.575455</td>\n",
406
  " <td>0.040977</td>\n",
 
408
  " <td>0.040977</td>\n",
409
  " </tr>\n",
410
  " <tr>\n",
411
+ " <th>22</th>\n",
412
  " <td>RWKV/rwkv-4-world-1b5</td>\n",
413
  " <td>0.554000</td>\n",
414
  " <td>0.039406</td>\n",
 
416
  " <td>0.039406</td>\n",
417
  " </tr>\n",
418
  " <tr>\n",
419
+ " <th>23</th>\n",
420
  " <td>RWKV/rwkv-4-world-7b</td>\n",
421
  " <td>0.601455</td>\n",
422
  " <td>0.053116</td>\n",
 
424
  " <td>0.053116</td>\n",
425
  " </tr>\n",
426
  " <tr>\n",
427
+ " <th>24</th>\n",
428
  " <td>RWKV/HF_v5-Eagle-7B</td>\n",
429
  " <td>0.621818</td>\n",
430
  " <td>0.068986</td>\n",
 
432
  " <td>0.068986</td>\n",
433
  " </tr>\n",
434
  " <tr>\n",
435
+ " <th>25</th>\n",
436
+ " <td>togethercomputer/RedPajama-INCITE-7B-Base</td>\n",
437
+ " <td>0.525455</td>\n",
438
+ " <td>0.036407</td>\n",
439
+ " <td>0.525455</td>\n",
440
+ " <td>0.036407</td>\n",
441
+ " </tr>\n",
442
+ " <tr>\n",
443
+ " <th>26</th>\n",
444
  " <td>togethercomputer/RedPajama-INCITE-7B-Instruct</td>\n",
445
  " <td>0.528545</td>\n",
446
  " <td>0.036470</td>\n",
 
448
  " <td>0.036470</td>\n",
449
  " </tr>\n",
450
  " <tr>\n",
451
+ " <th>27</th>\n",
452
  " <td>togethercomputer/RedPajama-INCITE-7B-Chat</td>\n",
453
  " <td>0.535455</td>\n",
454
  " <td>0.038723</td>\n",
 
456
  " <td>0.038723</td>\n",
457
  " </tr>\n",
458
  " <tr>\n",
459
+ " <th>28</th>\n",
460
  " <td>facebook/opt-2.7b</td>\n",
461
  " <td>0.521818</td>\n",
462
  " <td>0.029821</td>\n",
 
464
  " <td>0.029821</td>\n",
465
  " </tr>\n",
466
  " <tr>\n",
467
+ " <th>29</th>\n",
468
  " <td>facebook/opt-6.7b</td>\n",
469
  " <td>0.522909</td>\n",
470
  " <td>0.027216</td>\n",
 
472
  " <td>0.027216</td>\n",
473
  " </tr>\n",
474
  " <tr>\n",
475
+ " <th>30</th>\n",
476
  " <td>facebook/opt-1.3b</td>\n",
477
  " <td>0.521818</td>\n",
478
  " <td>0.029112</td>\n",
 
480
  " <td>0.029112</td>\n",
481
  " </tr>\n",
482
  " <tr>\n",
483
+ " <th>31</th>\n",
484
  " <td>tiiuae/falcon-7b-instruct</td>\n",
485
  " <td>0.536727</td>\n",
486
  " <td>0.053430</td>\n",
 
488
  " <td>0.053430</td>\n",
489
  " </tr>\n",
490
  " <tr>\n",
491
+ " <th>32</th>\n",
492
  " <td>tiiuae/falcon-rw-1b</td>\n",
493
  " <td>0.522545</td>\n",
494
  " <td>0.029446</td>\n",
 
496
  " <td>0.029446</td>\n",
497
  " </tr>\n",
498
  " <tr>\n",
499
+ " <th>33</th>\n",
500
  " <td>tiiuae/falcon-rw-7b</td>\n",
501
  " <td>0.535818</td>\n",
502
  " <td>0.033185</td>\n",
 
504
  " <td>0.033185</td>\n",
505
  " </tr>\n",
506
  " <tr>\n",
507
+ " <th>34</th>\n",
508
  " <td>tiiuae/falcon-7b</td>\n",
509
  " <td>0.559636</td>\n",
510
  " <td>0.071650</td>\n",
 
512
  " <td>0.071650</td>\n",
513
  " </tr>\n",
514
  " <tr>\n",
515
+ " <th>35</th>\n",
516
  " <td>huggyllama/llama-7b</td>\n",
517
  " <td>0.541818</td>\n",
518
  " <td>0.040718</td>\n",
 
520
  " <td>0.040718</td>\n",
521
  " </tr>\n",
522
  " <tr>\n",
523
+ " <th>36</th>\n",
524
  " <td>meta-llama/Llama-2-7b-chat-hf</td>\n",
525
  " <td>0.000000</td>\n",
526
  " <td>0.000000</td>\n",
 
528
  " <td>NaN</td>\n",
529
  " </tr>\n",
530
  " <tr>\n",
531
+ " <th>37</th>\n",
532
  " <td>meta-llama/Llama-2-7b-hf</td>\n",
533
  " <td>0.566727</td>\n",
534
  " <td>0.052515</td>\n",
 
541
  ],
542
  "text/plain": [
543
  " model avg_acc \\\n",
544
+ "0 mistralai/Mistral-7B-v0.1 0.558727 \n",
545
+ "1 mosaicml/mpt-7b-instruct 0.537091 \n",
546
+ "2 mosaicml/mpt-7b 0.536000 \n",
547
+ "3 mosaicml/mpt-7b-chat 0.538000 \n",
548
+ "4 bigscience/bloom-7b1 0.570909 \n",
549
+ "5 bigscience/bloomz-7b1-mt 0.546000 \n",
550
+ "6 bigscience/bloomz-7b1 0.547818 \n",
551
+ "7 EleutherAI/pythia-2.8b 0.537455 \n",
552
+ "8 EleutherAI/pythia-1.4b 0.526545 \n",
553
+ "9 EleutherAI/gpt-j-6b 0.544182 \n",
554
+ "10 EleutherAI/pythia-6.9b 0.540545 \n",
555
+ "11 google/gemma-2b 0.520545 \n",
556
+ "12 google/gemma-7b 0.517636 \n",
557
+ "13 microsoft/phi-1_5 0.521636 \n",
558
+ "14 microsoft/phi-2 0.512182 \n",
559
+ "15 microsoft/phi-1 0.517636 \n",
560
+ "16 allenai/OLMo-7B 0.537818 \n",
561
+ "17 TinyLlama/TinyLlama-1.1B-intermediate-step-143... 0.529273 \n",
562
+ "18 TinyLlama/TinyLlama-1.1B-Chat-v1.0 0.528909 \n",
563
+ "19 RWKV/rwkv-5-world-1b5 0.578909 \n",
564
+ "20 RWKV/rwkv-5-world-3b 0.590182 \n",
565
+ "21 RWKV/rwkv-4-world-3b 0.575455 \n",
566
+ "22 RWKV/rwkv-4-world-1b5 0.554000 \n",
567
+ "23 RWKV/rwkv-4-world-7b 0.601455 \n",
568
+ "24 RWKV/HF_v5-Eagle-7B 0.621818 \n",
569
+ "25 togethercomputer/RedPajama-INCITE-7B-Base 0.525455 \n",
570
+ "26 togethercomputer/RedPajama-INCITE-7B-Instruct 0.528545 \n",
571
+ "27 togethercomputer/RedPajama-INCITE-7B-Chat 0.535455 \n",
572
+ "28 facebook/opt-2.7b 0.521818 \n",
573
+ "29 facebook/opt-6.7b 0.522909 \n",
574
+ "30 facebook/opt-1.3b 0.521818 \n",
575
+ "31 tiiuae/falcon-7b-instruct 0.536727 \n",
576
+ "32 tiiuae/falcon-rw-1b 0.522545 \n",
577
+ "33 tiiuae/falcon-rw-7b 0.535818 \n",
578
+ "34 tiiuae/falcon-7b 0.559636 \n",
579
+ "35 huggyllama/llama-7b 0.541818 \n",
580
+ "36 meta-llama/Llama-2-7b-chat-hf 0.000000 \n",
581
+ "37 meta-llama/Llama-2-7b-hf 0.566727 \n",
582
  "\n",
583
  " avg_acc_stderr xcopa (acc) xcopa (acc_stderr) \n",
584
+ "0 0.055164 0.558727 0.055164 \n",
585
+ "1 0.041919 0.537091 0.041919 \n",
586
+ "2 0.042339 0.536000 0.042339 \n",
587
+ "3 0.047059 0.538000 0.047059 \n",
588
+ "4 0.061359 0.570909 0.061359 \n",
589
+ "5 0.038321 0.546000 0.038321 \n",
590
+ "6 0.038920 0.547818 0.038920 \n",
591
+ "7 0.026941 0.537455 0.026941 \n",
592
+ "8 0.027441 0.526545 0.027441 \n",
593
+ "9 0.034404 0.544182 0.034404 \n",
594
+ "10 0.029689 0.540545 0.029689 \n",
595
+ "11 0.025332 0.520545 0.025332 \n",
596
+ "12 0.027330 0.517636 0.027330 \n",
597
+ "13 0.026198 0.521636 0.026198 \n",
598
+ "14 0.029742 0.512182 0.029742 \n",
599
+ "15 0.029612 0.517636 0.029612 \n",
600
+ "16 0.034147 0.537818 0.034147 \n",
601
+ "17 0.029316 0.529273 0.029316 \n",
602
+ "18 0.031702 0.528909 0.031702 \n",
603
+ "19 0.045103 0.578909 0.045103 \n",
604
+ "20 0.056241 0.590182 0.056241 \n",
605
+ "21 0.040977 0.575455 0.040977 \n",
606
+ "22 0.039406 0.554000 0.039406 \n",
607
+ "23 0.053116 0.601455 0.053116 \n",
608
+ "24 0.068986 0.621818 0.068986 \n",
609
+ "25 0.036407 0.525455 0.036407 \n",
610
+ "26 0.036470 0.528545 0.036470 \n",
611
+ "27 0.038723 0.535455 0.038723 \n",
612
+ "28 0.029821 0.521818 0.029821 \n",
613
+ "29 0.027216 0.522909 0.027216 \n",
614
+ "30 0.029112 0.521818 0.029112 \n",
615
+ "31 0.053430 0.536727 0.053430 \n",
616
+ "32 0.029446 0.522545 0.029446 \n",
617
+ "33 0.033185 0.535818 0.033185 \n",
618
+ "34 0.071650 0.559636 0.071650 \n",
619
+ "35 0.040718 0.541818 0.040718 \n",
620
+ "36 0.000000 NaN NaN \n",
621
+ "37 0.052515 0.566727 0.052515 "
622
  ]
623
  },
624
+ "execution_count": 71,
625
  "metadata": {},
626
  "output_type": "execute_result"
627
  }
 
803
  },
804
  {
805
  "cell_type": "code",
806
+ "execution_count": 72,
807
  "metadata": {},
808
  "outputs": [
809
  {
810
  "name": "stdout",
811
  "output_type": "stream",
812
  "text": [
813
+ "total 14648\n",
814
+ "-rw-r--r--@ 1 picocreator staff 928K Feb 22 16:11 bf16-all-results-and-groups.csv\n",
815
+ "-rw-r--r--@ 1 picocreator staff 853K Feb 22 16:11 bf16-eng-results.csv\n",
816
+ "-rw-r--r--@ 1 picocreator staff 72K Feb 22 16:11 bf16-eng-summary.csv\n",
817
+ "-rw-r--r--@ 1 picocreator staff 88K Feb 22 16:11 bf16-multilang-results.csv\n",
818
+ "-rw-r--r--@ 1 picocreator staff 12K Feb 22 16:11 bf16-multilang-summary.csv\n",
819
+ "-rw-r--r--@ 1 picocreator staff 853K Feb 22 16:11 bf16-sorted-eng-results.csv\n",
820
+ "-rw-r--r--@ 1 picocreator staff 72K Feb 22 16:11 bf16-sorted-eng-summary.csv\n",
821
+ "-rw-r--r--@ 1 picocreator staff 12K Feb 22 16:11 bf16-sorted-multilang-summary.csv\n",
822
+ "-rw-r--r-- 1 picocreator staff 3.6M Feb 22 16:11 compiled-lm-eval-results.json\n"
823
  ]
824
  }
825
  ],
summary/bf16-all-results-and-groups.csv CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:efe0cfb4d53f702527e397a867224e060d703e62ada1d2d30f27afb760285dcc
3
- size 891577
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:79c0ed6dde13f302de5180898a26bca19bfd07b92c102f6738a9b1ca43ea44f0
3
+ size 950167
summary/bf16-eng-results.csv CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fc3e9d0ffb858e106041beda531bf2706383c0de773fe1da2acbaff57e83a56d
3
- size 814835
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:08ca430e7b064b8cd9ab78e1b230876c16e06d56fddc1667c4f9f4536a0bbd7d
3
+ size 873202
summary/bf16-eng-summary.csv CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1a467d22e9de972f40c888cc1631a3e7b4f83205e42e81ff6948cf2605d8d67c
3
- size 68567
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8dd9cb40d54d8a894d80ee62df2159ad1ba604cbe29e1aecbfebca34b7734df3
3
+ size 73735
summary/bf16-multilang-results.csv CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f110f59474d5afc6d8a11983fdcd20305b5605a5841639c41832df2e5c936fab
3
- size 89501
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8f3aea105ff0beeab9995b22f2482012285a7e5fc0495995758860f6ec9487c8
3
+ size 89719
summary/bf16-multilang-summary.csv CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d7adebd6e470e37b90765af94f322d8c97cee6889dd95b89fa334d8d4232e787
3
  size 12705
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e555bbea7c320c1b9652c9e11d6437b2b0b0304c07897da81665b2d67d0790e5
3
  size 12705
summary/bf16-sorted-eng-results.csv CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f96ad2ba65a1636ba52d86423e4fb8f56aea553f082bea17a0a3602a69f4b680
3
- size 814835
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b79009af2ddc22d099f765582e326c8079a58c272c959c9cb7abf1a7994bf214
3
+ size 873202
summary/bf16-sorted-eng-summary.csv CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:16fa216e0c759fe6485f1e74a79cd7aac1d764c4262f1f12cddc7208218c1cbe
3
- size 68567
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:94202ac71713a7d332b71e73f4b6b57f41fe5d3598f36fc76bc29e14dc78fb73
3
+ size 73735
summary/compiled-lm-eval-results.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9f6418312758cca1c72ff03bc08227542bb8a11bfac2db0d86de85ceed6c722d
3
- size 3506124
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7d2acccafdee4b14f1e4a897b57612cbc4a0ff551d1450f53908a89461314111
3
+ size 3765842