grg committed on
Commit
a60e7dc
1 Parent(s): 1d3b79e

Adding Mistral-Nemo and gemma

Browse files
static/leaderboard.csv CHANGED
@@ -1,30 +1,34 @@
1
  Model,Ordinal (Win rate),Cardinal (Score),RO Stability,Stress,CFI,SRMR,RMSEA
2
- phi-3-mini-128k-instruct,0.34424603174603174,0.32984992817164005,0.039299993295009855,0.281800547806919,0.5861361111111111,0.42524166666666674,0.3974944444444444
3
- phi-3-medium-128k-instruct,0.3516865079365079,0.30802986933853177,0.09692037989916814,0.2651981204439735,0.43025555555555556,0.5503277777777777,0.5381722222222222
4
- phi-3.5-mini-instruct,0.25744047619047616,0.2680653144619754,0.0361229186530762,0.28422749224983457,0.40715555555555555,0.5721138888888888,0.5507833333333333
5
- phi-3.5-MoE-instruct,0.41617063492063494,0.36128192067041315,0.10985291697837646,0.2739229692168671,0.5530944444444444,0.4248777777777778,0.40345
6
- Mistral-7B-Instruct-v0.1,0.23214285714285715,0.26609566354811315,0.027216280472015988,0.2829498135031582,0.38917777777777773,0.5561138888888888,0.530213888888889
7
- Mistral-7B-Instruct-v0.2,0.36904761904761907,0.32133832899241477,0.14417876497818388,0.265188983528973,0.3802722222222222,0.5727305555555555,0.5483611111111111
8
- Mistral-7B-Instruct-v0.3,0.27132936507936506,0.26572479479146804,0.07960539866974455,0.2742399030139009,0.31385,0.6241,0.6081333333333333
9
- Mixtral-8x7B-Instruct-v0.1,0.4667658730158731,0.3819009850972602,0.21473356319081474,0.2624402608740656,0.45275,0.5034666666666667,0.4905694444444444
10
- Mixtral-8x22B-Instruct-v0.1,0.3625992063492063,0.31529864972153404,0.1414001940345544,0.2548838005881672,0.3772361111111111,0.5810888888888889,0.5844750000000001
11
- command_r_plus,0.5922619047619047,0.4995356672762356,0.3429686514651868,0.23811982320641845,0.6033000000000001,0.3740166666666668,0.3667527777777777
12
- llama_3_8b_instruct,0.5153769841269842,0.4295836112681494,0.24527785038654715,0.245806400289881,0.5498222222222222,0.42656388888888896,0.42189444444444446
13
- llama_3_70b_instruct,0.7876984126984127,0.6839540364836003,0.607020698814379,0.18525883672204868,0.7210055555555557,0.2346083333333333,0.25758888888888887
14
- llama_3.1_8b_instruct,0.5773809523809523,0.4786874422110324,0.4295080949846363,0.22060228669473025,0.4305722222222223,0.5455027777777777,0.553
15
- llama_3.1_70b_instruct,0.8253968253968255,0.7172545013390067,0.691365862744007,0.1709718847084183,0.6979472222222223,0.2636777777777777,0.2907250000000001
16
- llama_3.1_405b_instruct_4bit,0.7405753968253967,0.6490864350383405,0.7232098126552619,0.1702199925365422,0.4875722222222223,0.4963444444444445,0.5211555555555556
17
- llama_3.2_1b_instruct,0.22718253968253965,0.2522036562381785,0.027192115495770382,0.29255310096654275,0.37450000000000006,0.5990222222222223,0.5740638888888888
18
- llama_3.2_3b_instruct,0.4221230158730159,0.3615804465210719,0.13450325180647235,0.27485276839064654,0.5017,0.44956666666666667,0.4226500000000001
19
- Qwen2-7B-Instruct,0.42757936507936506,0.36370005127542027,0.25108519506513916,0.25776537005719313,0.3560861111111111,0.6009722222222222,0.5920888888888889
20
- Qwen2-72B-Instruct,0.5823412698412699,0.5461212335522644,0.6465993243020925,0.20297742879025626,0.3045,0.6543138888888889,0.6646361111111111
21
- Qwen2.5-0.5B-Instruct,0.30406746031746035,0.3005554090516966,0.002970456550606876,0.2928913315666324,0.5371250000000001,0.44709722222222226,0.404575
22
- Qwen2.5-7B-Instruct,0.632440476190476,0.5163098181421168,0.333554494486959,0.2505866550331236,0.6473694444444444,0.30400277777777773,0.29651944444444434
23
- Qwen2.5-32B-Instruct,0.7395833333333334,0.656917654644944,0.6724190751477237,0.1806656189868978,0.5603222222222223,0.40237500000000004,0.41161666666666663
24
- Qwen2.5-72B-Instruct,0.8298611111111112,0.7104489147495714,0.6974116787371809,0.16176650806326276,0.6734583333333333,0.2993,0.3184472222222223
25
- gpt-3.5-turbo-0125,0.26190476190476186,0.28218378886707396,0.08240359836763214,0.28728574920060357,0.3873055555555555,0.599925,0.572238888888889
26
- gpt-4o-0513,0.6944444444444444,0.5989532974661671,0.5122163952167618,0.19201420113771173,0.6235416666666667,0.34458611111111115,0.3441805555555555
27
- gpt-4o-mini-2024-07-18,0.3968253968253968,0.3418785071827972,0.13575309046266867,0.2707065266105181,0.44214722222222214,0.5004583333333332,0.47896666666666665
28
- Mistral-Large-Instruct-2407,0.8501984126984127,0.7374229691535793,0.7644582301049158,0.16944638941325085,0.6510750000000001,0.31028611111111104,0.3297916666666667
29
- Mistral-Small-Instruct-2409,0.7842261904761906,0.6890378862258165,0.6416815833333804,0.1894343546381,0.6840472222222221,0.2601583333333335,0.2888777777777778
30
- dummy,0.1929563492063492,0.2291015386716794,-0.009004148398032956,0.2928877637010999,0.3755222222222222,0.622275,0.5915305555555557
 
 
 
 
 
1
  Model,Ordinal (Win rate),Cardinal (Score),RO Stability,Stress,CFI,SRMR,RMSEA
2
+ gemma-2-2b-it,0.3732638888888889,0.3309858600428668,0.14746606707946294,0.263080165752695,0.40932500000000005,0.550475,0.5377472222222222
3
+ gemma-2-9b-it,0.7387152777777778,0.6020857503693501,0.43782539244147833,0.20116278903333318,0.7543666666666667,0.23989444444444452,0.24792499999999995
4
+ gemma-2-27b-it,0.6319444444444444,0.5270946699366518,0.3917304045417486,0.2058170364515589,0.5997861111111111,0.37121111111111116,0.37292222222222227
5
+ phi-3-mini-128k-instruct,0.3246527777777778,0.32984992817164005,0.039299993295009855,0.281800547806919,0.5861361111111111,0.42524166666666674,0.3974944444444444
6
+ phi-3-medium-128k-instruct,0.3307291666666667,0.30802986933853177,0.09692037989916814,0.2651981204439735,0.43025555555555556,0.5503277777777777,0.5381722222222222
7
+ phi-3.5-mini-instruct,0.24348958333333334,0.2680653144619754,0.0361229186530762,0.28422749224983457,0.40715555555555555,0.5721138888888888,0.5507833333333333
8
+ phi-3.5-MoE-instruct,0.3962673611111111,0.36128192067041315,0.10985291697837646,0.2739229692168671,0.5530944444444444,0.4248777777777778,0.40345
9
+ Mistral-7B-Instruct-v0.1,0.21961805555555555,0.26609566354811315,0.027216280472015988,0.2829498135031582,0.38917777777777773,0.5561138888888888,0.530213888888889
10
+ Mistral-7B-Instruct-v0.2,0.3506944444444444,0.32133832899241477,0.14417876497818388,0.265188983528973,0.3802722222222222,0.5727305555555555,0.5483611111111111
11
+ Mistral-7B-Instruct-v0.3,0.2569444444444444,0.26572479479146804,0.07960539866974455,0.2742399030139009,0.31385,0.6241,0.6081333333333333
12
+ Mixtral-8x7B-Instruct-v0.1,0.4431423611111111,0.3819009850972602,0.21473356319081474,0.2624402608740656,0.45275,0.5034666666666667,0.4905694444444444
13
+ Mixtral-8x22B-Instruct-v0.1,0.3381076388888889,0.31529864972153404,0.1414001940345544,0.2548838005881672,0.3772361111111111,0.5810888888888889,0.5844750000000001
14
+ command_r_plus,0.5828993055555556,0.4995356672762356,0.3429686514651868,0.23811982320641845,0.6033000000000001,0.3740166666666668,0.3667527777777777
15
+ llama_3_8b_instruct,0.4943576388888889,0.4295836112681494,0.24527785038654715,0.245806400289881,0.5498222222222222,0.42656388888888896,0.42189444444444446
16
+ llama_3_70b_instruct,0.7855902777777778,0.6839540364836003,0.607020698814379,0.18525883672204868,0.7210055555555557,0.2346083333333333,0.25758888888888887
17
+ llama_3.1_8b_instruct,0.5737847222222222,0.4786874422110324,0.4295080949846363,0.22060228669473025,0.4305722222222223,0.5455027777777777,0.553
18
+ llama_3.1_70b_instruct,0.8272569444444444,0.7172545013390067,0.691365862744007,0.1709718847084183,0.6979472222222223,0.2636777777777777,0.2907250000000001
19
+ llama_3.1_405b_instruct_4bit,0.7404513888888888,0.6490864350383405,0.7232098126552619,0.1702199925365422,0.4875722222222223,0.4963444444444445,0.5211555555555556
20
+ llama_3.2_1b_instruct,0.2152777777777778,0.2522036562381785,0.027192115495770382,0.29255310096654275,0.37450000000000006,0.5990222222222223,0.5740638888888888
21
+ llama_3.2_3b_instruct,0.4001736111111111,0.3615804465210719,0.13450325180647235,0.27485276839064654,0.5017,0.44956666666666667,0.4226500000000001
22
+ Qwen2-7B-Instruct,0.4123263888888889,0.36370005127542027,0.25108519506513916,0.25776537005719313,0.3560861111111111,0.6009722222222222,0.5920888888888889
23
+ Qwen2-72B-Instruct,0.5802951388888888,0.5461212335522644,0.6465993243020925,0.20297742879025626,0.3045,0.6543138888888889,0.6646361111111111
24
+ Qwen2.5-0.5B-Instruct,0.2899305555555556,0.3005554090516966,0.002970456550606876,0.2928913315666324,0.5371250000000001,0.44709722222222226,0.404575
25
+ Qwen2.5-7B-Instruct,0.6163194444444444,0.5163098181421168,0.333554494486959,0.2505866550331236,0.6473694444444444,0.30400277777777773,0.29651944444444434
26
+ Qwen2.5-32B-Instruct,0.7408854166666666,0.656917654644944,0.6724190751477237,0.1806656189868978,0.5603222222222223,0.40237500000000004,0.41161666666666663
27
+ Qwen2.5-72B-Instruct,0.8276909722222222,0.7104489147495714,0.6974116787371809,0.16176650806326276,0.6734583333333333,0.2993,0.3184472222222223
28
+ gpt-3.5-turbo-0125,0.24609375,0.28218378886707396,0.08240359836763214,0.28728574920060357,0.3873055555555555,0.599925,0.572238888888889
29
+ gpt-4o-0513,0.6948784722222222,0.5989532974661671,0.5122163952167618,0.19201420113771173,0.6235416666666667,0.34458611111111115,0.3441805555555555
30
+ gpt-4o-mini-2024-07-18,0.37890625,0.3418785071827972,0.13575309046266867,0.2707065266105181,0.44214722222222214,0.5004583333333332,0.47896666666666665
31
+ Mistral-Large-Instruct-2407,0.8467881944444444,0.7374229691535793,0.7644582301049158,0.16944638941325085,0.6510750000000001,0.31028611111111104,0.3297916666666667
32
+ Mistral-Nemo-Instruct-2407,0.5876736111111112,0.5262426956484347,0.4414072595011627,0.21142636170606344,0.5161,0.42923055555555545,0.43113055555555546
33
+ Mistral-Small-Instruct-2409,0.7782118055555556,0.6890378862258165,0.6416815833333804,0.1894343546381,0.6840472222222221,0.2601583333333335,0.2888777777777778
34
+ dummy,0.18880208333333334,0.2291015386716794,-0.009004148398032956,0.2928877637010999,0.3755222222222222,0.622275,0.5915305555555557
static/models_data/Mistral-Nemo-Instruct-2407/cfa_metrics.csv ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Context chunk,CFI,TLI,SRMR,RMSEA
2
+ chunk_0,0.415225,0.38944999999999996,0.55465,0.5547249999999999
3
+ chunk_1,0.36997500000000005,0.33294999999999997,0.5631999999999999,0.55495
4
+ chunk_2,0.907125,0.8771,0.08945,0.079925
5
+ chunk_3,0.6317,0.606625,0.3249,0.32384999999999997
6
+ chunk_4,0.383275,0.34015,0.5604500000000001,0.5612
7
+ chunk_chess_0,0.4017,0.3686,0.54955,0.568475
8
+ chunk_grammar_1,0.364375,0.357275,0.550575,0.524525
9
+ chunk_no_conv,0.440975,0.409975,0.5550499999999999,0.58055
10
+ chunk_svs_no_conv,0.73055,0.6734749999999999,0.11524999999999999,0.131975
static/models_data/Mistral-Nemo-Instruct-2407/matrix.svg ADDED
static/models_data/Mistral-Nemo-Instruct-2407/model_detail.html ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <p>
2
+ This open-source model was created by <a target="_blank" href="https://mistral.ai/">Mistral AI</a>.
3
+ You can find the release blog post <a target="_blank" href="https://mistral.ai/news/mistral-nemo/">here</a>.
4
+ The model is available on the huggingface hub: <a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407">https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407</a>.
5
+ The 12B model supports up to 128K token context windows.
6
+ </p>
static/models_data/Mistral-Nemo-Instruct-2407/ranks.svg ADDED
static/models_data/Mistral-Nemo-Instruct-2407/structure.svg ADDED
static/models_data/cardinal.svg CHANGED
static/models_data/gemma-2-27b-it/cfa_metrics.csv ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Context chunk,CFI,TLI,SRMR,RMSEA
2
+ chunk_0,0.38434999999999997,0.33115,0.5558,0.563675
3
+ chunk_1,0.648725,0.642075,0.3245,0.30475
4
+ chunk_2,0.656725,0.6615,0.30912500000000004,0.294925
5
+ chunk_3,0.48872499999999997,0.5168,0.5375749999999999,0.509525
6
+ chunk_4,0.6917,0.676175,0.316475,0.30295
7
+ chunk_chess_0,0.612925,0.568575,0.32689999999999997,0.370425
8
+ chunk_grammar_1,0.35895,0.3204,0.5567249999999999,0.575025
9
+ chunk_no_conv,0.8836499999999999,0.8490500000000001,0.101775,0.1179
10
+ chunk_svs_no_conv,0.672325,0.6561,0.31202500000000005,0.317125
static/models_data/gemma-2-27b-it/matrix.svg ADDED
static/models_data/gemma-2-27b-it/model_detail.html ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <p>
2
+ This open-source model was created by <a target="_blank" href="https://deepmind.google/">Google DeepMind</a>.
3
+ You can find more details <a target="_blank" href="https://ai.google.dev/gemma/docs/model_card_2">here</a>.
4
+ The model is available on the huggingface hub: <a target="_blank" href="https://huggingface.co/google/gemma-2-27b-it">https://huggingface.co/google/gemma-2-27b-it</a>.
5
+ The 27B model supports up to 8K token context windows.
6
+ </p>
static/models_data/gemma-2-27b-it/ranks.svg ADDED
static/models_data/gemma-2-27b-it/structure.svg ADDED
static/models_data/gemma-2-2b-it/cfa_metrics.csv ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Context chunk,CFI,TLI,SRMR,RMSEA
2
+ chunk_0,0.5731999999999999,0.5288250000000001,0.328825,0.292725
3
+ chunk_1,0.413275,0.3805,0.552825,0.534675
4
+ chunk_2,0.169675,0.120225,0.7786,0.770025
5
+ chunk_3,0.38245,0.32499999999999996,0.5515749999999999,0.5559000000000001
6
+ chunk_4,0.440925,0.416675,0.544675,0.5445749999999999
7
+ chunk_chess_0,0.20505,0.19345,0.776275,0.77115
8
+ chunk_grammar_1,0.65285,1.199975,0.319175,0.28132500000000005
9
+ chunk_no_conv,0.8465,0.7966500000000001,0.10232499999999999,0.08935000000000001
10
+ chunk_svs_no_conv,0.0,0.0,1.0,1.0
static/models_data/gemma-2-2b-it/matrix.svg ADDED
static/models_data/gemma-2-2b-it/model_detail.html ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <p>
2
+ This open-source model was created by <a target="_blank" href="https://deepmind.google/">Google DeepMind</a>.
3
+ You can find more details <a target="_blank" href="https://ai.google.dev/gemma/docs/model_card_2">here</a>.
4
+ The model is available on the huggingface hub: <a target="_blank" href="https://huggingface.co/google/gemma-2-2b-it">https://huggingface.co/google/gemma-2-2b-it</a>.
5
+ The 2B model supports up to 8K token context windows.
6
+ </p>
static/models_data/gemma-2-2b-it/ranks.svg ADDED
static/models_data/gemma-2-2b-it/structure.svg ADDED
static/models_data/gemma-2-9b-it/cfa_metrics.csv ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Context chunk,CFI,TLI,SRMR,RMSEA
2
+ chunk_0,0.888425,0.8612500000000001,0.094,0.0795
3
+ chunk_1,0.897575,0.879875,0.09025,0.0885
4
+ chunk_2,0.699975,0.6807000000000001,0.3105,0.3014
5
+ chunk_3,0.673925,0.657025,0.31282499999999996,0.307475
6
+ chunk_4,0.47187500000000004,0.45735000000000003,0.5402499999999999,0.5511
7
+ chunk_chess_0,0.911425,0.8817499999999999,0.08405,0.1146
8
+ chunk_grammar_1,0.901925,0.87,0.082675,0.110675
9
+ chunk_no_conv,0.702225,0.683525,0.31505000000000005,0.33699999999999997
10
+ chunk_svs_no_conv,0.64195,0.610225,0.32944999999999997,0.341075
static/models_data/gemma-2-9b-it/matrix.svg ADDED
static/models_data/gemma-2-9b-it/model_detail.html ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ <p>
2
+ This open-source model was created by <a target="_blank" href="https://deepmind.google/">Google DeepMind</a>.
3
+ You can find more details <a target="_blank" href="https://ai.google.dev/gemma/docs/model_card_2">here</a>.
4
+ The model is available on the huggingface hub: <a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it">https://huggingface.co/google/gemma-2-9b-it</a>.
5
+ The 9B model supports up to 8K token context windows.
6
+ </p>
static/models_data/gemma-2-9b-it/ranks.svg ADDED
static/models_data/gemma-2-9b-it/structure.svg ADDED
static/models_data/llama_3.2_1b_instruct/cfa_metrics.csv CHANGED
@@ -1,10 +1,3 @@
1
  Context chunk,CFI,TLI,SRMR,RMSEA
2
- chunk_0,0.3755,0.32637499999999997,0.549175,0.541125
3
- chunk_1,0.162625,0.13435,0.780375,0.77835
4
- chunk_2,0.387375,0.38245,0.551025,0.525875
5
- chunk_3,0.167275,0.116375,0.774475,0.77235
6
- chunk_4,0.4379,0.504325,0.54175,0.5132
7
  chunk_chess_0,1.0,1.4561,0.09875,0.0
8
- chunk_grammar_1,0.36235,0.5313,0.55545,0.519675
9
- chunk_no_conv,0.227475,0.213625,0.7701,0.766
10
  chunk_svs_no_conv,0.25,-0.47565,0.7701,0.75
 
1
  Context chunk,CFI,TLI,SRMR,RMSEA
 
 
 
 
 
2
  chunk_chess_0,1.0,1.4561,0.09875,0.0
 
 
3
  chunk_svs_no_conv,0.25,-0.47565,0.7701,0.75
static/models_data/ordinal.svg CHANGED