zhuohan-7 committed
Commit 16f4bdb · verified · 1 Parent(s): b767f30

Upload folder using huggingface_hub

This view is limited to 50 files because it contains too many changes.

Files changed (50)
  1. results/cross_lingual/few_shot/cross_logiqa.csv +4 -0
  2. results/cross_lingual/few_shot/cross_mmlu.csv +4 -0
  3. results/cross_lingual/few_shot/cross_xquad.csv +4 -0
  4. results/cross_lingual/zero_shot/cross_logiqa.csv +7 -0
  5. results/cross_lingual/zero_shot/cross_mmlu.csv +7 -0
  6. results/cross_lingual/zero_shot/cross_xquad.csv +7 -0
  7. results/cultural_reasoning/few_shot/cn_eval.csv +4 -0
  8. results/cultural_reasoning/few_shot/ph_eval.csv +4 -0
  9. results/cultural_reasoning/few_shot/sg_eval.csv +4 -0
  10. results/cultural_reasoning/few_shot/sg_eval_v1_cleaned.csv +4 -0
  11. results/cultural_reasoning/few_shot/us_eval.csv +4 -0
  12. results/cultural_reasoning/zero_shot/cn_eval.csv +7 -0
  13. results/cultural_reasoning/zero_shot/ph_eval.csv +7 -0
  14. results/cultural_reasoning/zero_shot/sg_eval.csv +7 -0
  15. results/cultural_reasoning/zero_shot/sg_eval_v1_cleaned.csv +7 -0
  16. results/cultural_reasoning/zero_shot/us_eval.csv +7 -0
  17. results/dialogue/few_shot/dream.csv +4 -0
  18. results/dialogue/zero_shot/dialogsum.csv +8 -0
  19. results/dialogue/zero_shot/dream.csv +8 -0
  20. results/dialogue/zero_shot/samsum.csv +8 -0
  21. results/emotion/few_shot/ind_emotion.csv +4 -0
  22. results/emotion/few_shot/sst2.csv +4 -0
  23. results/emotion/zero_shot/ind_emotion.csv +8 -0
  24. results/emotion/zero_shot/sst2.csv +8 -0
  25. results/flores_translation/few_shot/ind2eng.csv +4 -0
  26. results/flores_translation/few_shot/vie2eng.csv +4 -0
  27. results/flores_translation/few_shot/zho2eng.csv +4 -0
  28. results/flores_translation/few_shot/zsm2eng.csv +4 -0
  29. results/flores_translation/zero_shot/ind2eng.csv +7 -0
  30. results/flores_translation/zero_shot/vie2eng.csv +7 -0
  31. results/flores_translation/zero_shot/zho2eng.csv +7 -0
  32. results/flores_translation/zero_shot/zsm2eng.csv +7 -0
  33. results/fundamental_nlp_tasks/few_shot/c3.csv +3 -0
  34. results/fundamental_nlp_tasks/few_shot/cola.csv +4 -0
  35. results/fundamental_nlp_tasks/few_shot/mnli.csv +4 -0
  36. results/fundamental_nlp_tasks/few_shot/mrpc.csv +4 -0
  37. results/fundamental_nlp_tasks/few_shot/ocnli.csv +4 -0
  38. results/fundamental_nlp_tasks/few_shot/qnli.csv +4 -0
  39. results/fundamental_nlp_tasks/few_shot/qqp.csv +4 -0
  40. results/fundamental_nlp_tasks/few_shot/rte.csv +4 -0
  41. results/fundamental_nlp_tasks/few_shot/wnli.csv +4 -0
  42. results/fundamental_nlp_tasks/zero_shot/c3.csv +8 -0
  43. results/fundamental_nlp_tasks/zero_shot/cola.csv +8 -0
  44. results/fundamental_nlp_tasks/zero_shot/mnli.csv +8 -0
  45. results/fundamental_nlp_tasks/zero_shot/mrpc.csv +9 -0
  46. results/fundamental_nlp_tasks/zero_shot/ocnli.csv +8 -0
  47. results/fundamental_nlp_tasks/zero_shot/qnli.csv +9 -0
  48. results/fundamental_nlp_tasks/zero_shot/qqp.csv +8 -0
  49. results/fundamental_nlp_tasks/zero_shot/rte.csv +9 -0
  50. results/fundamental_nlp_tasks/zero_shot/wnli.csv +9 -0
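
All of the CSVs in this upload share the same flat layout: one header row naming the metric columns, then one row per model. As an illustration only (the local results/ root and the use of pandas are assumptions, not part of this commit), a minimal sketch for gathering the per-task files into a single table:

# Sketch: collect the result CSVs from this commit into one DataFrame.
# Assumes the repo has been downloaded locally so that results/ is the
# root directory shown in the file list above.
from pathlib import Path

import pandas as pd

frames = []
for path in sorted(Path("results").rglob("*.csv")):
    df = pd.read_csv(path)
    # Recover task group, prompting setting, and dataset name from the
    # layout results/<task_group>/<few_shot|zero_shot>/<dataset>.csv
    df["task_group"] = path.parts[1]
    df["setting"] = path.parts[2]
    df["dataset"] = path.stem
    frames.append(df)

# Files with different metric columns concatenate with NaN padding.
all_results = pd.concat(frames, ignore_index=True)
print(all_results[["task_group", "setting", "dataset", "Model"]].head())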
results/cross_lingual/few_shot/cross_logiqa.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
+Meta-Llama-3.1-70B,0.6241883116883117,0.5274350649350649,0.5717473426837402,0.7386363636363636,0.6306818181818182,0.6193181818181818,0.6079545454545454,0.6022727272727273,0.625,0.5454545454545454
+Meta-Llama-3-8B,0.4569805194805195,0.25519480519480514,0.32750096946546897,0.5340909090909091,0.5284090909090909,0.44886363636363635,0.4602272727272727,0.38636363636363635,0.44886363636363635,0.39204545454545453
+llama3-8b-cpt-sea-lionv2-base,0.4553571428571429,0.2600649350649351,0.33105611198328916,0.45454545454545453,0.48295454545454547,0.48863636363636365,0.48295454545454547,0.4431818181818182,0.45454545454545453,0.3806818181818182
+Meta-Llama-3.1-8B,0.4586038961038961,0.28230519480519484,0.3494794808137755,0.5056818181818182,0.5170454545454546,0.4602272727272727,0.4943181818181818,0.39204545454545453,0.45454545454545453,0.38636363636363635
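
In these cross-lingual files, the AC3 column is consistent with the harmonic mean of Accuracy and Cross-Lingual Consistency (as SeaEval defines AC3); the rows above reproduce it to full floating-point precision. A minimal check, with the Meta-Llama-3.1-70B row hard-coded as an example:

# Sketch: AC3 as the harmonic mean of Accuracy and Cross-Lingual
# Consistency, checked against the Meta-Llama-3.1-70B row above.
acc = 0.6241883116883117
consistency = 0.5274350649350649

ac3 = 2 * acc * consistency / (acc + consistency)
print(ac3)  # 0.5717473426837402 -- matches the AC3 column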
results/cross_lingual/few_shot/cross_mmlu.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
+Meta-Llama-3.1-70B,0.74,0.6445714285714286,0.688997110969635,0.7933333333333333,0.72,0.7466666666666667,0.7333333333333333,0.7466666666666667,0.7466666666666667,0.6933333333333334
+Meta-Llama-3-8B,0.5295238095238095,0.29771428571428576,0.381140094029779,0.6133333333333333,0.4666666666666667,0.58,0.5,0.5333333333333333,0.47333333333333333,0.54
+llama3-8b-cpt-sea-lionv2-base,0.5114285714285715,0.33390476190476204,0.404026266610288,0.6333333333333333,0.47333333333333333,0.52,0.47333333333333333,0.52,0.49333333333333335,0.4666666666666667
+Meta-Llama-3.1-8B,0.5304761904761904,0.34419047619047627,0.4174958519516044,0.68,0.5,0.5533333333333333,0.4866666666666667,0.5333333333333333,0.5133333333333333,0.44666666666666666
results/cross_lingual/few_shot/cross_xquad.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
+Meta-Llama-3.1-70B,0.9588235294117647,0.9382352941176471,0.9484176926084648,0.9647058823529412,0.9571428571428572,0.9554621848739496,0.957983193277311,,,
+Meta-Llama-3-8B,0.8951680672268908,0.8123949579831933,0.851775323760744,0.9277310924369748,0.8756302521008403,0.8907563025210085,0.8865546218487395,,,
+llama3-8b-cpt-sea-lionv2-base,0.9054621848739495,0.8464285714285714,0.8749507477272823,0.9193277310924369,0.8899159663865546,0.9126050420168067,0.9,,,
+Meta-Llama-3.1-8B,0.9063025210084034,0.8340336134453781,0.8686675538861947,0.9319327731092437,0.8890756302521008,0.9117647058823529,0.892436974789916,,,
results/cross_lingual/zero_shot/cross_logiqa.csv CHANGED
@@ -1,7 +1,14 @@
 Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
+Qwen2-7B-Instruct,0.564935064935065,0.48279220779220783,0.5206435955861558,0.6590909090909091,0.7045454545454546,0.5340909090909091,0.5738636363636364,0.5397727272727273,0.5113636363636364,0.4318181818181818
 Meta-Llama-3.1-8B-Instruct,0.4472402597402597,0.43717532467532455,0.44215052105151864,0.5227272727272727,0.4602272727272727,0.4715909090909091,0.4715909090909091,0.4147727272727273,0.3977272727272727,0.39204545454545453
+Qwen2-72B-Instruct,0.6728896103896104,0.6762987012987012,0.6745898487968579,0.75,0.8068181818181818,0.6534090909090909,0.6193181818181818,0.625,0.6534090909090909,0.6022727272727273
+Meta-Llama-3-8B-Instruct,0.4610389610389611,0.45097402597402597,0.4559509553669637,0.5965909090909091,0.48295454545454547,0.5,0.4602272727272727,0.42045454545454547,0.4034090909090909,0.36363636363636365
 Meta-Llama-3.1-70B-Instruct,0.6566558441558442,0.598051948051948,0.6259852839118454,0.7443181818181818,0.7215909090909091,0.6647727272727273,0.6534090909090909,0.6193181818181818,0.625,0.5681818181818182
+SeaLLMs-v3-7B-Chat,0.5551948051948051,0.5142857142857142,0.5339578453833284,0.6022727272727273,0.6647727272727273,0.5738636363636364,0.5454545454545454,0.5170454545454546,0.5,0.48295454545454547
 gemma-2-9b-it,0.6185064935064934,0.5592532467532466,0.5873893507784849,0.6647727272727273,0.6761363636363636,0.5625,0.6193181818181818,0.5795454545454546,0.6420454545454546,0.5852272727272727
 Meta-Llama-3-70B-Instruct,0.6306818181818182,0.6186688311688312,0.6246175698800746,0.7102272727272727,0.6875,0.6420454545454546,0.6193181818181818,0.6022727272727273,0.6136363636363636,0.5397727272727273
 sg_llama3_70b_inst,0.6217532467532468,0.5629870129870129,0.590912649920049,0.7272727272727273,0.6590909090909091,0.6477272727272727,0.6079545454545454,0.6136363636363636,0.5795454545454546,0.5170454545454546
+gemma-2-2b-it,0.4780844155844156,0.4448051948051948,0.46084478401384643,0.5568181818181818,0.5,0.5,0.48863636363636365,0.4375,0.4602272727272727,0.4034090909090909
+llama3-8b-cpt-sea-lionv2-instruct,0.48538961038961037,0.4472402597402597,0.46553468284769084,0.5284090909090909,0.5113636363636364,0.5227272727272727,0.5227272727272727,0.48863636363636365,0.44886363636363635,0.375
 GPT4o_0513,0.7159090909090909,0.6941558441558444,0.7048646724637749,0.7613636363636364,0.7670454545454546,0.6988636363636364,0.6988636363636364,0.7045454545454546,0.6761363636363636,0.7045454545454546
+Meta-Llama-3.1-8B,0.29464285714285715,0.07857142857142858,0.12406015034269886,0.32954545454545453,0.32386363636363635,0.2840909090909091,0.2727272727272727,0.2840909090909091,0.3125,0.2556818181818182
results/cross_lingual/zero_shot/cross_mmlu.csv CHANGED
@@ -1,7 +1,14 @@
 Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
+Qwen2-7B-Instruct,0.6628571428571428,0.5257142857142858,0.5863736263242921,0.76,0.6666666666666666,0.72,0.5933333333333334,0.7066666666666667,0.6133333333333333,0.58
 Meta-Llama-3.1-8B-Instruct,0.5619047619047618,0.5020952380952383,0.5303189947159841,0.66,0.5266666666666666,0.5733333333333334,0.5266666666666666,0.5533333333333333,0.5533333333333333,0.54
+Qwen2-72B-Instruct,0.779047619047619,0.7611428571428573,0.7699911663398871,0.8133333333333334,0.7933333333333333,0.7933333333333333,0.7333333333333333,0.7666666666666667,0.78,0.7733333333333333
+Meta-Llama-3-8B-Instruct,0.5733333333333334,0.4742857142857144,0.5191272726777197,0.7133333333333334,0.5866666666666667,0.5733333333333334,0.5866666666666667,0.5066666666666667,0.5333333333333333,0.5133333333333333
 Meta-Llama-3.1-70B-Instruct,0.7638095238095238,0.7716190476190474,0.7676944251955988,0.8,0.74,0.7666666666666667,0.7666666666666667,0.76,0.7666666666666667,0.7466666666666667
+SeaLLMs-v3-7B-Chat,0.6628571428571429,0.6135238095238095,0.6372370860992635,0.74,0.6933333333333334,0.6933333333333334,0.6466666666666666,0.68,0.6,0.5866666666666667
 gemma-2-9b-it,0.7161904761904762,0.7163809523809525,0.7162857015727578,0.7733333333333333,0.74,0.7066666666666667,0.64,0.7266666666666667,0.6933333333333334,0.7333333333333333
 Meta-Llama-3-70B-Instruct,0.758095238095238,0.7316190476190477,0.7446218665971989,0.7933333333333333,0.7466666666666667,0.7733333333333333,0.7466666666666667,0.7733333333333333,0.7333333333333333,0.74
 sg_llama3_70b_inst,0.7342857142857142,0.7079999999999999,0.7209033280007295,0.82,0.6866666666666666,0.7333333333333333,0.6933333333333334,0.78,0.7266666666666667,0.7
+gemma-2-2b-it,0.5780952380952381,0.5480000000000002,0.5626454667971265,0.7,0.5866666666666667,0.5866666666666667,0.5333333333333333,0.5666666666666667,0.5333333333333333,0.54
+llama3-8b-cpt-sea-lionv2-instruct,0.6104761904761905,0.5685714285714286,0.5887791368067445,0.72,0.6,0.6133333333333333,0.58,0.6333333333333333,0.5933333333333334,0.5333333333333333
 GPT4o_0513,0.8038095238095239,0.8506666666666668,0.8265745643832277,0.8266666666666667,0.7933333333333333,0.8,0.7666666666666667,0.7933333333333333,0.8266666666666667,0.82
+Meta-Llama-3.1-8B,0.42000000000000004,0.1535238095238095,0.22485552968513808,0.4866666666666667,0.43333333333333335,0.44,0.38666666666666666,0.47333333333333333,0.3333333333333333,0.38666666666666666
results/cross_lingual/zero_shot/cross_xquad.csv CHANGED
@@ -1,7 +1,14 @@
 Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
+Qwen2-7B-Instruct,0.9418067226890756,0.9046218487394958,0.9228398561109394,0.957983193277311,0.9336134453781513,0.9436974789915966,0.9319327731092437,,,
 Meta-Llama-3.1-8B-Instruct,0.9287815126050419,0.8867647058823529,0.9072869161050563,0.9420168067226891,0.9193277310924369,0.9361344537815126,0.9176470588235294,,,
+Qwen2-72B-Instruct,0.9613445378151261,0.9516806722689075,0.956488195931227,0.9638655462184874,0.9596638655462185,0.9596638655462185,0.9621848739495799,,,
+Meta-Llama-3-8B-Instruct,0.9210084033613445,0.880672268907563,0.9003888121913395,0.9411764705882353,0.9033613445378151,0.9260504201680673,0.9134453781512605,,,
 Meta-Llama-3.1-70B-Instruct,0.9615546218487395,0.9512605042016806,0.9563798632627071,0.9647058823529412,0.9512605042016806,0.9647058823529412,0.965546218487395,,,
+SeaLLMs-v3-7B-Chat,0.9403361344537815,0.917016806722689,0.9285300818164836,0.9537815126050421,0.9378151260504202,0.9394957983193277,0.9302521008403362,,,
 gemma-2-9b-it,0.9567226890756303,0.9350840336134454,0.9457796088507574,0.9663865546218487,0.9411764705882353,0.9588235294117647,0.9605042016806723,,,
 Meta-Llama-3-70B-Instruct,0.9592436974789916,0.9422268907563025,0.9506591499208973,0.9714285714285714,0.9403361344537815,0.9596638655462185,0.965546218487395,,,
 sg_llama3_70b_inst,0.9552521008403361,0.9453781512605042,0.9502894779607259,0.9663865546218487,0.9436974789915966,0.957983193277311,0.9529411764705882,,,
+gemma-2-2b-it,0.917016806722689,0.8665966386554622,0.8910940700869288,0.934453781512605,0.9025210084033614,0.9193277310924369,0.9117647058823529,,,
+llama3-8b-cpt-sea-lionv2-instruct,0.9365546218487395,0.9086134453781513,0.9223724784871395,0.9420168067226891,0.926890756302521,0.9436974789915966,0.9336134453781513,,,
 GPT4o_0513,0.9605042016806723,0.951890756302521,0.9561780814209724,0.965546218487395,0.9537815126050421,0.9630252100840336,0.9596638655462185,,,
+Meta-Llama-3.1-8B,0.5619747899159664,0.21176470588235294,0.307613678067924,0.4756302521008403,0.6579831932773109,0.5571428571428572,0.5571428571428572,,,
results/cultural_reasoning/few_shot/cn_eval.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.5904761904761905
+Meta-Llama-3-8B,0.42857142857142855
+llama3-8b-cpt-sea-lionv2-base,0.38095238095238093
+Meta-Llama-3.1-8B,0.4380952380952381
results/cultural_reasoning/few_shot/ph_eval.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.66
+Meta-Llama-3-8B,0.5
+llama3-8b-cpt-sea-lionv2-base,0.54
+Meta-Llama-3.1-8B,0.52
results/cultural_reasoning/few_shot/sg_eval.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.7475728155339806
+Meta-Llama-3-8B,0.6601941747572816
+llama3-8b-cpt-sea-lionv2-base,0.6310679611650486
+Meta-Llama-3.1-8B,0.6504854368932039
results/cultural_reasoning/few_shot/sg_eval_v1_cleaned.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.7794117647058824
+Meta-Llama-3-8B,0.6470588235294118
+llama3-8b-cpt-sea-lionv2-base,0.6470588235294118
+Meta-Llama-3.1-8B,0.6029411764705882
results/cultural_reasoning/few_shot/us_eval.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.8691588785046729
+Meta-Llama-3-8B,0.6542056074766355
+llama3-8b-cpt-sea-lionv2-base,0.6542056074766355
+Meta-Llama-3.1-8B,0.7009345794392523
results/cultural_reasoning/zero_shot/cn_eval.csv CHANGED
@@ -1,7 +1,14 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.8285714285714286
 Meta-Llama-3.1-8B-Instruct,0.4857142857142857
+Qwen2-72B-Instruct,0.8285714285714286
+Meta-Llama-3-8B-Instruct,0.4666666666666667
 Meta-Llama-3.1-70B-Instruct,0.5428571428571428
+SeaLLMs-v3-7B-Chat,0.819047619047619
 gemma-2-9b-it,0.580952380952381
 Meta-Llama-3-70B-Instruct,0.5333333333333333
 sg_llama3_70b_inst,0.5523809523809524
+gemma-2-2b-it,0.3619047619047619
+llama3-8b-cpt-sea-lionv2-instruct,0.49523809523809526
 GPT4o_0513,0.8095238095238095
+Meta-Llama-3.1-8B,0.3904761904761905
results/cultural_reasoning/zero_shot/ph_eval.csv CHANGED
@@ -1,7 +1,14 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.52
 Meta-Llama-3.1-8B-Instruct,0.6
+Qwen2-72B-Instruct,0.62
+Meta-Llama-3-8B-Instruct,0.58
 Meta-Llama-3.1-70B-Instruct,0.68
+SeaLLMs-v3-7B-Chat,0.47
 gemma-2-9b-it,0.58
 Meta-Llama-3-70B-Instruct,0.63
 sg_llama3_70b_inst,0.69
+gemma-2-2b-it,0.4
+llama3-8b-cpt-sea-lionv2-instruct,0.56
 GPT4o_0513,0.77
+Meta-Llama-3.1-8B,0.43
results/cultural_reasoning/zero_shot/sg_eval.csv CHANGED
@@ -1,7 +1,14 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.6796116504854369
 Meta-Llama-3.1-8B-Instruct,0.5728155339805825
+Qwen2-72B-Instruct,0.7378640776699029
+Meta-Llama-3-8B-Instruct,0.6504854368932039
 Meta-Llama-3.1-70B-Instruct,0.7184466019417476
+SeaLLMs-v3-7B-Chat,0.7184466019417476
 gemma-2-9b-it,0.6699029126213593
 Meta-Llama-3-70B-Instruct,0.7087378640776699
 sg_llama3_70b_inst,0.6699029126213593
+gemma-2-2b-it,0.5533980582524272
+llama3-8b-cpt-sea-lionv2-instruct,0.6504854368932039
 GPT4o_0513,0.8446601941747572
+Meta-Llama-3.1-8B,0.39805825242718446
results/cultural_reasoning/zero_shot/sg_eval_v1_cleaned.csv CHANGED
@@ -1,7 +1,14 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.6323529411764706
 Meta-Llama-3.1-8B-Instruct,0.5294117647058824
+Qwen2-72B-Instruct,0.6764705882352942
+Meta-Llama-3-8B-Instruct,0.5882352941176471
 Meta-Llama-3.1-70B-Instruct,0.6617647058823529
+SeaLLMs-v3-7B-Chat,0.5882352941176471
 gemma-2-9b-it,0.6029411764705882
 Meta-Llama-3-70B-Instruct,0.6617647058823529
 sg_llama3_70b_inst,0.6176470588235294
+gemma-2-2b-it,0.4852941176470588
+llama3-8b-cpt-sea-lionv2-instruct,0.6617647058823529
 GPT4o_0513,0.8088235294117647
+Meta-Llama-3.1-8B,0.4117647058823529
results/cultural_reasoning/zero_shot/us_eval.csv CHANGED
@@ -1,7 +1,14 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.7289719626168224
 Meta-Llama-3.1-8B-Instruct,0.7289719626168224
+Qwen2-72B-Instruct,0.8785046728971962
+Meta-Llama-3-8B-Instruct,0.7009345794392523
 Meta-Llama-3.1-70B-Instruct,0.8411214953271028
+SeaLLMs-v3-7B-Chat,0.6915887850467289
 gemma-2-9b-it,0.8130841121495327
 Meta-Llama-3-70B-Instruct,0.8691588785046729
 sg_llama3_70b_inst,0.8598130841121495
+gemma-2-2b-it,0.6915887850467289
+llama3-8b-cpt-sea-lionv2-instruct,0.7009345794392523
 GPT4o_0513,0.8691588785046729
+Meta-Llama-3.1-8B,0.3644859813084112
results/dialogue/few_shot/dream.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.9495345418912298
+Meta-Llama-3-8B,0.8280254777070064
+llama3-8b-cpt-sea-lionv2-base,0.8520333170014699
+Meta-Llama-3.1-8B,0.8569328760411563
results/dialogue/zero_shot/dialogsum.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Average,ROUGE-1,ROUGE-2,ROUGE-L
+Qwen2-7B-Instruct,0.2092663759873139,0.30486100228371826,0.09413830506038247,0.22879982061784096
 Meta-Llama-3.1-8B-Instruct,0.24990743661648132,0.3515557454075673,0.12563120411564133,0.2725353603262354
+Qwen2-72B-Instruct,0.2183280630214023,0.316174552903144,0.10156543495268992,0.23724420120837297
+Meta-Llama-3-8B-Instruct,0.23978455271183616,0.33971099717559883,0.1203340311564728,0.2593086298034369
+Meta-Llama-3.1-70B-Instruct,0.2526239717396146,0.35714386898604744,0.1258832921736473,0.27484475405914904
+SeaLLMs-v3-7B-Chat,0.24891094210680076,0.35393482223136147,0.12172072639345373,0.27107727769558715
+gemma-2-9b-it,0.2560682231168516,0.36247455000865003,0.12571639767749476,0.2800137216644101
 Meta-Llama-3-70B-Instruct,0.2557065499979308,0.36058417323628,0.12758087337786866,0.2789546033796438
 sg_llama3_70b_inst,0.26633840691332344,0.3692028513115729,0.1412505883866801,0.2885617810417173
+gemma-2-2b-it,0.2597323674875989,0.36848124762381895,0.12622684440269072,0.2844890104362872
+llama3-8b-cpt-sea-lionv2-instruct,0.25777587511641403,0.35911990072292727,0.13269121463917308,0.2815165099871418
 GPT4o_0513,0.2375730297294346,0.3364674648846549,0.11718194476069822,0.25906967954295057
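
For the summarisation files (dialogsum and samsum), the Average column is consistent with the plain arithmetic mean of ROUGE-1, ROUGE-2, and ROUGE-L. A quick check against the GPT4o_0513 row above:

# Sketch: Average = mean(ROUGE-1, ROUGE-2, ROUGE-L), checked against
# the GPT4o_0513 row of dialogsum.csv above.
rouge = (0.3364674648846549, 0.11718194476069822, 0.25906967954295057)
print(sum(rouge) / 3)  # ~0.2375730297294346 -- matches the Average column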
results/dialogue/zero_shot/dream.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.9353258206761391
 Meta-Llama-3.1-8B-Instruct,0.9039686428221461
+Qwen2-72B-Instruct,0.9612934835864773
+Meta-Llama-3-8B-Instruct,0.8946594806467418
+Meta-Llama-3.1-70B-Instruct,0.9559039686428221
+SeaLLMs-v3-7B-Chat,0.9265066144047036
+gemma-2-9b-it,0.9416952474277315
 Meta-Llama-3-70B-Instruct,0.9480646741793238
 sg_llama3_70b_inst,0.9524742773150416
+gemma-2-2b-it,0.8510534051935326
+llama3-8b-cpt-sea-lionv2-instruct,0.8858402743753062
 GPT4o_0513,0.9583537481626654
results/dialogue/zero_shot/samsum.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Average,ROUGE-1,ROUGE-2,ROUGE-L
+Qwen2-7B-Instruct,0.25668781132950264,0.36375948458827556,0.12939804942125302,0.27690589997897935
 Meta-Llama-3.1-8B-Instruct,0.2891505262763006,0.4001228010515775,0.15677431231732958,0.31055446545999466
+Qwen2-72B-Instruct,0.2800906719573321,0.3887231369098802,0.15237661526996754,0.29917226369214855
+Meta-Llama-3-8B-Instruct,0.2846315092346869,0.39397110152251813,0.154320846916639,0.30560257926490364
+Meta-Llama-3.1-70B-Instruct,0.28934874612070227,0.4036295731242805,0.15211190810296196,0.31230475713486433
+SeaLLMs-v3-7B-Chat,0.2959981719045788,0.4078820748825196,0.16338306782652476,0.316729373004692
+gemma-2-9b-it,0.3100514077180449,0.4289412957792292,0.16727050182456474,0.3339424255503407
 Meta-Llama-3-70B-Instruct,0.2893525314227379,0.4030746211134018,0.15236139065578,0.3126215824990321
 sg_llama3_70b_inst,0.3146051103643872,0.4271361513564755,0.18238925099430264,0.33428992874238356
+gemma-2-2b-it,0.31118787136959813,0.4324251755711466,0.16441328335793207,0.33672515517971563
+llama3-8b-cpt-sea-lionv2-instruct,0.306997595680581,0.4214048099551701,0.1709790451938523,0.3286089318927205
 GPT4o_0513,0.27736679291505306,0.386750207633093,0.14889081847621596,0.2964593526358502
results/emotion/few_shot/ind_emotion.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.7204545454545455
+Meta-Llama-3-8B,0.4681818181818182
+llama3-8b-cpt-sea-lionv2-base,0.5727272727272728
+Meta-Llama-3.1-8B,0.5318181818181819
results/emotion/few_shot/sst2.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.9036697247706422
+Meta-Llama-3-8B,0.7201834862385321
+llama3-8b-cpt-sea-lionv2-base,0.7282110091743119
+Meta-Llama-3.1-8B,0.8222477064220184
results/emotion/zero_shot/ind_emotion.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.6545454545454545
 Meta-Llama-3.1-8B-Instruct,0.6545454545454545
+Qwen2-72B-Instruct,0.675
+Meta-Llama-3-8B-Instruct,0.6522727272727272
+Meta-Llama-3.1-70B-Instruct,0.7159090909090909
+SeaLLMs-v3-7B-Chat,0.6454545454545455
+gemma-2-9b-it,0.7477272727272727
 Meta-Llama-3-70B-Instruct,0.6909090909090909
 sg_llama3_70b_inst,0.7
+gemma-2-2b-it,0.6636363636363637
+llama3-8b-cpt-sea-lionv2-instruct,0.6613636363636364
 GPT4o_0513,0.7068181818181818
results/emotion/zero_shot/sst2.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.9346330275229358
 Meta-Llama-3.1-8B-Instruct,0.8646788990825688
+Qwen2-72B-Instruct,0.9346330275229358
+Meta-Llama-3-8B-Instruct,0.8784403669724771
+Meta-Llama-3.1-70B-Instruct,0.9529816513761468
+SeaLLMs-v3-7B-Chat,0.9403669724770642
+gemma-2-9b-it,0.9311926605504587
 Meta-Llama-3-70B-Instruct,0.9495412844036697
 sg_llama3_70b_inst,0.9334862385321101
+gemma-2-2b-it,0.9243119266055045
+llama3-8b-cpt-sea-lionv2-instruct,0.9128440366972477
 GPT4o_0513,0.9415137614678899
results/flores_translation/few_shot/ind2eng.csv CHANGED
@@ -1 +1,5 @@
 Model,BLEU
+Meta-Llama-3.1-70B,0.42145684080212753
+Meta-Llama-3-8B,0.37684086636912956
+llama3-8b-cpt-sea-lionv2-base,0.38065942591799257
+Meta-Llama-3.1-8B,0.38181303557840174
results/flores_translation/few_shot/vie2eng.csv CHANGED
@@ -1 +1,5 @@
 Model,BLEU
+Meta-Llama-3.1-70B,0.3486043252859807
+Meta-Llama-3-8B,0.3088281924097908
+llama3-8b-cpt-sea-lionv2-base,0.3101352718812011
+Meta-Llama-3.1-8B,0.31860377848723964
results/flores_translation/few_shot/zho2eng.csv CHANGED
@@ -1 +1,5 @@
 Model,BLEU
+Meta-Llama-3.1-70B,0.2784128355061452
+Meta-Llama-3-8B,0.24157503759807666
+llama3-8b-cpt-sea-lionv2-base,0.2196548010627023
+Meta-Llama-3.1-8B,0.23636236548065317
results/flores_translation/few_shot/zsm2eng.csv CHANGED
@@ -1 +1,5 @@
 Model,BLEU
+Meta-Llama-3.1-70B,0.4419951682556223
+Meta-Llama-3-8B,0.38778379180318306
+llama3-8b-cpt-sea-lionv2-base,0.373752985045955
+Meta-Llama-3.1-8B,0.39297234157214134
results/flores_translation/zero_shot/ind2eng.csv CHANGED
@@ -1,7 +1,14 @@
 Model,BLEU
+Qwen2-7B-Instruct,0.29408553325533265
 Meta-Llama-3.1-8B-Instruct,0.3765752579792989
+Qwen2-72B-Instruct,0.4043588265556185
+Meta-Llama-3-8B-Instruct,0.33079891679041123
 Meta-Llama-3.1-70B-Instruct,0.43366494500251235
+SeaLLMs-v3-7B-Chat,0.3594829412574955
 gemma-2-9b-it,0.40786563079141763
 Meta-Llama-3-70B-Instruct,0.3830092775167675
 sg_llama3_70b_inst,0.4086440304524362
+gemma-2-2b-it,0.3482500758113138
+llama3-8b-cpt-sea-lionv2-instruct,0.3916108972514423
 GPT4o_0513,0.42589589086974855
+Meta-Llama-3.1-8B,0.008893689222008793
results/flores_translation/zero_shot/vie2eng.csv CHANGED
@@ -1,7 +1,14 @@
 Model,BLEU
+Qwen2-7B-Instruct,0.24106736560355876
 Meta-Llama-3.1-8B-Instruct,0.31019605539004524
+Qwen2-72B-Instruct,0.33005323227052946
+Meta-Llama-3-8B-Instruct,0.2647448190950291
 Meta-Llama-3.1-70B-Instruct,0.37244508311079816
+SeaLLMs-v3-7B-Chat,0.30981028289420137
 gemma-2-9b-it,0.3367700653885
 Meta-Llama-3-70B-Instruct,0.3230140263371192
 sg_llama3_70b_inst,0.34258533717783785
+gemma-2-2b-it,0.27518909199172303
+llama3-8b-cpt-sea-lionv2-instruct,0.327781936019637
 GPT4o_0513,0.36219303373759176
+Meta-Llama-3.1-8B,0.0064729173628987014
results/flores_translation/zero_shot/zho2eng.csv CHANGED
@@ -1,7 +1,14 @@
 Model,BLEU
+Qwen2-7B-Instruct,0.2113761361724575
 Meta-Llama-3.1-8B-Instruct,0.23889886925287113
+Qwen2-72B-Instruct,0.23893268538329387
+Meta-Llama-3-8B-Instruct,0.199495011482748
 Meta-Llama-3.1-70B-Instruct,0.2832594176173152
+SeaLLMs-v3-7B-Chat,0.2516593644617717
 gemma-2-9b-it,0.267527968123433
 Meta-Llama-3-70B-Instruct,0.24397819518058994
 sg_llama3_70b_inst,0.26000707510414633
+gemma-2-2b-it,0.21164036008441425
+llama3-8b-cpt-sea-lionv2-instruct,0.2381535278220489
 GPT4o_0513,0.27722306559544163
+Meta-Llama-3.1-8B,0.0030426517414972854
results/flores_translation/zero_shot/zsm2eng.csv CHANGED
@@ -1,7 +1,14 @@
 Model,BLEU
+Qwen2-7B-Instruct,0.28031997065822994
 Meta-Llama-3.1-8B-Instruct,0.3700921225177551
+Qwen2-72B-Instruct,0.40796892621611885
+Meta-Llama-3-8B-Instruct,0.31625368345049
 Meta-Llama-3.1-70B-Instruct,0.4462132282683508
+SeaLLMs-v3-7B-Chat,0.3484133510670942
 gemma-2-9b-it,0.4234100394581857
 Meta-Llama-3-70B-Instruct,0.3957287030176054
 sg_llama3_70b_inst,0.4163761508073963
+gemma-2-2b-it,0.33737270487369614
+llama3-8b-cpt-sea-lionv2-instruct,0.38799258214381604
 GPT4o_0513,0.451496635720668
+Meta-Llama-3.1-8B,0.00798239824596684
results/fundamental_nlp_tasks/few_shot/c3.csv CHANGED
@@ -1 +1,4 @@
 Model,Accuracy
+Meta-Llama-3-8B,0.7655198204936425
+llama3-8b-cpt-sea-lionv2-base,0.7995512341062079
+Meta-Llama-3.1-8B,0.8103964098728497
results/fundamental_nlp_tasks/few_shot/cola.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.7248322147651006
+Meta-Llama-3-8B,0.5934803451581975
+llama3-8b-cpt-sea-lionv2-base,0.6203259827420902
+Meta-Llama-3.1-8B,0.6471716203259827
results/fundamental_nlp_tasks/few_shot/mnli.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.7395
+Meta-Llama-3-8B,0.442
+llama3-8b-cpt-sea-lionv2-base,0.456
+Meta-Llama-3.1-8B,0.465
results/fundamental_nlp_tasks/few_shot/mrpc.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.6397058823529411
+Meta-Llama-3-8B,0.5906862745098039
+llama3-8b-cpt-sea-lionv2-base,0.5686274509803921
+Meta-Llama-3.1-8B,0.571078431372549
results/fundamental_nlp_tasks/few_shot/ocnli.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.6786440677966101
+Meta-Llama-3-8B,0.38101694915254236
+llama3-8b-cpt-sea-lionv2-base,0.3871186440677966
+Meta-Llama-3.1-8B,0.4067796610169492
results/fundamental_nlp_tasks/few_shot/qnli.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.5777045579352005
+Meta-Llama-3-8B,0.5028372688998719
+llama3-8b-cpt-sea-lionv2-base,0.500274574409665
+Meta-Llama-3.1-8B,0.500274574409665
results/fundamental_nlp_tasks/few_shot/qqp.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.7365
+Meta-Llama-3-8B,0.54
+llama3-8b-cpt-sea-lionv2-base,0.5295
+Meta-Llama-3.1-8B,0.557
results/fundamental_nlp_tasks/few_shot/rte.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.7725631768953068
+Meta-Llama-3-8B,0.5848375451263538
+llama3-8b-cpt-sea-lionv2-base,0.6570397111913358
+Meta-Llama-3.1-8B,0.6642599277978339
results/fundamental_nlp_tasks/few_shot/wnli.csv CHANGED
@@ -1 +1,5 @@
 Model,Accuracy
+Meta-Llama-3.1-70B,0.7323943661971831
+Meta-Llama-3-8B,0.49295774647887325
+llama3-8b-cpt-sea-lionv2-base,0.43661971830985913
+Meta-Llama-3.1-8B,0.5774647887323944
results/fundamental_nlp_tasks/zero_shot/c3.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.9244577412116679
 Meta-Llama-3.1-8B-Instruct,0.8672400897531788
+Qwen2-72B-Instruct,0.9611069558713538
+Meta-Llama-3-8B-Instruct,0.8515332834704562
+Meta-Llama-3.1-70B-Instruct,0.9603590127150337
+SeaLLMs-v3-7B-Chat,0.9143605086013463
+gemma-2-9b-it,0.9222139117427075
 Meta-Llama-3-70B-Instruct,0.9521316379955124
 sg_llama3_70b_inst,0.9289454001495886
+gemma-2-2b-it,0.7700074794315632
+llama3-8b-cpt-sea-lionv2-instruct,0.8672400897531788
 GPT4o_0513,0.9648466716529543
results/fundamental_nlp_tasks/zero_shot/cola.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.7871524448705657
 Meta-Llama-3.1-8B-Instruct,0.6673058485139022
+Qwen2-72B-Instruct,0.8341323106423778
+Meta-Llama-3-8B-Instruct,0.6548418024928092
+Meta-Llama-3.1-70B-Instruct,0.850431447746884
+SeaLLMs-v3-7B-Chat,0.785234899328859
+gemma-2-9b-it,0.7938638542665388
 Meta-Llama-3-70B-Instruct,0.835091083413231
 sg_llama3_70b_inst,0.8696069031639502
+gemma-2-2b-it,0.6749760306807286
+llama3-8b-cpt-sea-lionv2-instruct,0.6078619367209971
 GPT4o_0513,0.8398849472674976
results/fundamental_nlp_tasks/zero_shot/mnli.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.7295
 Meta-Llama-3.1-8B-Instruct,0.4825
+Qwen2-72B-Instruct,0.7925
+Meta-Llama-3-8B-Instruct,0.546
+Meta-Llama-3.1-70B-Instruct,0.7015
+SeaLLMs-v3-7B-Chat,0.653
+gemma-2-9b-it,0.716
 Meta-Llama-3-70B-Instruct,0.6709421285692472
 sg_llama3_70b_inst,0.7685
+gemma-2-2b-it,0.6185
+llama3-8b-cpt-sea-lionv2-instruct,0.5765
 GPT4o_0513,0.8335
results/fundamental_nlp_tasks/zero_shot/mrpc.csv CHANGED
@@ -1,4 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.7867647058823529
 Meta-Llama-3.1-8B-Instruct,0.6740196078431373
+Qwen2-72B-Instruct,0.8063725490196079
+Meta-Llama-3-8B-Instruct,0.678921568627451
+Meta-Llama-3.1-70B-Instruct,0.7696078431372549
+SeaLLMs-v3-7B-Chat,0.7475490196078431
+gemma-2-9b-it,0.7401960784313726
 Meta-Llama-3-70B-Instruct,0.7598039215686274
 sg_llama3_70b_inst,0.7892156862745098
+gemma-2-2b-it,0.7083333333333334
+llama3-8b-cpt-sea-lionv2-instruct,0.5833333333333334
+GPT4o_0513,0.7377450980392157
results/fundamental_nlp_tasks/zero_shot/ocnli.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.6542372881355932
 Meta-Llama-3.1-8B-Instruct,0.40983050847457625
+Qwen2-72B-Instruct,0.7820338983050847
+Meta-Llama-3-8B-Instruct,0.44033898305084745
+Meta-Llama-3.1-70B-Instruct,0.6423728813559322
+SeaLLMs-v3-7B-Chat,0.5698305084745763
+gemma-2-9b-it,0.6189830508474576
 Meta-Llama-3-70B-Instruct,0.5928813559322034
 sg_llama3_70b_inst,0.6420338983050847
+gemma-2-2b-it,0.43322033898305085
+llama3-8b-cpt-sea-lionv2-instruct,0.45559322033898303
 GPT4o_0513,0.7308474576271187
results/fundamental_nlp_tasks/zero_shot/qnli.csv CHANGED
@@ -1,4 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.8154859967051071
 Meta-Llama-3.1-8B-Instruct,0.5777045579352005
+Qwen2-72B-Instruct,0.8887058392824455
+Meta-Llama-3-8B-Instruct,0.6025993044114956
+Meta-Llama-3.1-70B-Instruct,0.9026176093721399
+SeaLLMs-v3-7B-Chat,0.7159070107999268
+gemma-2-9b-it,0.9070107999267801
 Meta-Llama-3-70B-Instruct,0.876807614863628
 sg_llama3_70b_inst,0.9004210140948197
+gemma-2-2b-it,0.7792421746293245
+llama3-8b-cpt-sea-lionv2-instruct,0.6101043382756727
+GPT4o_0513,0.9304411495515285
results/fundamental_nlp_tasks/zero_shot/qqp.csv CHANGED
@@ -1,5 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.781
 Meta-Llama-3.1-8B-Instruct,0.5645
+Qwen2-72B-Instruct,0.8065
+Meta-Llama-3-8B-Instruct,0.563
+Meta-Llama-3.1-70B-Instruct,0.815
+SeaLLMs-v3-7B-Chat,0.7625
+gemma-2-9b-it,0.7775
 Meta-Llama-3-70B-Instruct,0.7876082117239673
 sg_llama3_70b_inst,0.804
+gemma-2-2b-it,0.761
+llama3-8b-cpt-sea-lionv2-instruct,0.6225
 GPT4o_0513,0.8085
results/fundamental_nlp_tasks/zero_shot/rte.csv CHANGED
@@ -1,4 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.8231046931407943
 Meta-Llama-3.1-8B-Instruct,0.6750902527075813
+Qwen2-72B-Instruct,0.8447653429602888
+Meta-Llama-3-8B-Instruct,0.6173285198555957
+Meta-Llama-3.1-70B-Instruct,0.8483754512635379
+SeaLLMs-v3-7B-Chat,0.7870036101083032
+gemma-2-9b-it,0.7472924187725631
 Meta-Llama-3-70B-Instruct,0.8086642599277978
 sg_llama3_70b_inst,0.8916967509025271
+gemma-2-2b-it,0.7292418772563177
+llama3-8b-cpt-sea-lionv2-instruct,0.6859205776173285
+GPT4o_0513,0.8700361010830325
results/fundamental_nlp_tasks/zero_shot/wnli.csv CHANGED
@@ -1,4 +1,13 @@
 Model,Accuracy
+Qwen2-7B-Instruct,0.7183098591549296
 Meta-Llama-3.1-8B-Instruct,0.49295774647887325
+Qwen2-72B-Instruct,0.8873239436619719
+Meta-Llama-3-8B-Instruct,0.4788732394366197
+Meta-Llama-3.1-70B-Instruct,0.8450704225352113
+SeaLLMs-v3-7B-Chat,0.5915492957746479
+gemma-2-9b-it,0.7746478873239436
 Meta-Llama-3-70B-Instruct,0.7887323943661971
 sg_llama3_70b_inst,0.8309859154929577
+gemma-2-2b-it,0.43661971830985913
+llama3-8b-cpt-sea-lionv2-instruct,0.5774647887323944
+GPT4o_0513,0.9295774647887324