readme description and evaluate-0 results

README.md CHANGED

@@ -26,6 +26,10 @@ tags:
![logo](./misc/logo.png)

+A pretrained language model based on the Llama architecture, with about **33M** parameters, trained on **9.7B** (`9,782,206,713`) tokens from more than **5.2M** (`5,285,575`) dataset rows.
+
+This model is intended not for immediate use, but for continued pretraining and finetuning on a downstream task. While it can handle a context length of up to **32K** (`32,768`) tokens, it was pretrained on sequences of **2K** (`2,048`) tokens.
+
[loss, val_loss](https://api.wandb.ai/links/mtasic85/z591qpyv)

[val_ppl](https://api.wandb.ai/links/mtasic85/lku5hbws)
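
The added description positions this checkpoint for continued pretraining or finetuning rather than direct use. Below is a minimal loading sketch with Hugging Face `transformers`; the repository id is a placeholder (the diff does not name it), and the printed values assume the figures stated in the card.

```python
# Sketch only: load the base checkpoint for continued pretraining / finetuning.
# "mtasic85/<repo-name>" is a placeholder repository id, not taken from this diff.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "mtasic85/<repo-name>"  # placeholder: substitute the actual repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16)

# The card states a 32K (32,768) token maximum context, but pretraining used
# 2K (2,048) token sequences, so keep training sequences near 2,048 tokens
# unless you deliberately extend the effective context during finetuning.
print(model.config.max_position_embeddings)        # expected: 32768 per the card
print(f"{model.num_parameters() / 1e6:.0f}M params")  # expected: ~33M
```
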
@@ -36,83 +40,53 @@ tags:
## lm-evaluation-harness

-| - us_foreign_policy | 1|none | 0|acc |↑ |0.2700|± |0.0446|
-| - stem | 2|none | |acc |↑ |0.2058|± |0.0072|
-| - abstract_algebra | 1|none | 0|acc |↑ |0.1700|± |0.0378|
-| - anatomy | 1|none | 0|acc |↑ |0.1778|± |0.0330|
-| - astronomy | 1|none | 0|acc |↑ |0.1776|± |0.0311|
-| - college_biology | 1|none | 0|acc |↑ |0.2569|± |0.0365|
-| - college_chemistry | 1|none | 0|acc |↑ |0.1900|± |0.0394|
-| - college_computer_science | 1|none | 0|acc |↑ |0.2600|± |0.0441|
-| - college_mathematics | 1|none | 0|acc |↑ |0.2100|± |0.0409|
-| - college_physics | 1|none | 0|acc |↑ |0.2059|± |0.0402|
-| - computer_security | 1|none | 0|acc |↑ |0.2400|± |0.0429|
-| - conceptual_physics | 1|none | 0|acc |↑ |0.2681|± |0.0290|
-| - electrical_engineering | 1|none | 0|acc |↑ |0.2345|± |0.0353|
-| - elementary_mathematics | 1|none | 0|acc |↑ |0.2011|± |0.0206|
-| - high_school_biology | 1|none | 0|acc |↑ |0.1806|± |0.0219|
-| - high_school_chemistry | 1|none | 0|acc |↑ |0.1478|± |0.0250|
-| - high_school_computer_science | 1|none | 0|acc |↑ |0.2400|± |0.0429|
-| - high_school_mathematics | 1|none | 0|acc |↑ |0.2111|± |0.0249|
-| - high_school_physics | 1|none | 0|acc |↑ |0.1788|± |0.0313|
-| - high_school_statistics | 1|none | 0|acc |↑ |0.1620|± |0.0251|
-| - machine_learning | 1|none | 0|acc |↑ |0.2768|± |0.0425|
-|truthfulqa_mc2 | 2|none | 0|acc |↑ |0.4975|± |0.0165|
-|winogrande | 1|none | 0|acc |↑ |0.5146|± |0.0140|
-
-| Groups |Version|Filter|n-shot|Metric| |Value | |Stderr|
-|------------------|------:|------|------|------|---|-----:|---|-----:|
-|mmlu | 2|none | |acc |↑ |0.2280|± |0.0035|
-| - humanities | 2|none | |acc |↑ |0.2376|± |0.0062|
-| - other | 2|none | |acc |↑ |0.2481|± |0.0077|
-| - social sciences| 2|none | |acc |↑ |0.2155|± |0.0074|
-| - stem | 2|none | |acc |↑ |0.2058|± |0.0072|
+| Tasks |Version|Filter|n-shot| Metric | |Value | |Stderr|
+|-----------------------------------------------------------|-------|------|-----:|-----------------------|---|-----:|---|------|
+|leaderboard | N/A| | | | | | | |
+| - leaderboard_bbh | N/A| | | | | | | |
+| - leaderboard_bbh_boolean_expressions | 1|none | 3|acc_norm |↑ |0.4600|± |0.0316|
+| - leaderboard_bbh_causal_judgement | 1|none | 3|acc_norm |↑ |0.5187|± |0.0366|
+| - leaderboard_bbh_date_understanding | 1|none | 3|acc_norm |↑ |0.1600|± |0.0232|
+| - leaderboard_bbh_disambiguation_qa | 1|none | 3|acc_norm |↑ |0.3000|± |0.0290|
+| - leaderboard_bbh_formal_fallacies | 1|none | 3|acc_norm |↑ |0.4680|± |0.0316|
+| - leaderboard_bbh_geometric_shapes | 1|none | 3|acc_norm |↑ |0.0400|± |0.0124|
+| - leaderboard_bbh_hyperbaton | 1|none | 3|acc_norm |↑ |0.5240|± |0.0316|
+| - leaderboard_bbh_logical_deduction_five_objects | 1|none | 3|acc_norm |↑ |0.2240|± |0.0264|
+| - leaderboard_bbh_logical_deduction_seven_objects | 1|none | 3|acc_norm |↑ |0.1480|± |0.0225|
+| - leaderboard_bbh_logical_deduction_three_objects | 1|none | 3|acc_norm |↑ |0.3160|± |0.0295|
+| - leaderboard_bbh_movie_recommendation | 1|none | 3|acc_norm |↑ |0.2560|± |0.0277|
+| - leaderboard_bbh_navigate | 1|none | 3|acc_norm |↑ |0.4200|± |0.0313|
+| - leaderboard_bbh_object_counting | 1|none | 3|acc_norm |↑ |0.0680|± |0.0160|
+| - leaderboard_bbh_penguins_in_a_table | 1|none | 3|acc_norm |↑ |0.1918|± |0.0327|
+| - leaderboard_bbh_reasoning_about_colored_objects | 1|none | 3|acc_norm |↑ |0.1160|± |0.0203|
+| - leaderboard_bbh_ruin_names | 1|none | 3|acc_norm |↑ |0.2000|± |0.0253|
+| - leaderboard_bbh_salient_translation_error_detection | 1|none | 3|acc_norm |↑ |0.1320|± |0.0215|
+| - leaderboard_bbh_snarks | 1|none | 3|acc_norm |↑ |0.4719|± |0.0375|
+| - leaderboard_bbh_sports_understanding | 1|none | 3|acc_norm |↑ |0.4600|± |0.0316|
+| - leaderboard_bbh_temporal_sequences | 1|none | 3|acc_norm |↑ |0.2880|± |0.0287|
+| - leaderboard_bbh_tracking_shuffled_objects_five_objects | 1|none | 3|acc_norm |↑ |0.1880|± |0.0248|
+| - leaderboard_bbh_tracking_shuffled_objects_seven_objects| 1|none | 3|acc_norm |↑ |0.1280|± |0.0212|
+| - leaderboard_bbh_tracking_shuffled_objects_three_objects| 1|none | 3|acc_norm |↑ |0.3040|± |0.0292|
+| - leaderboard_bbh_web_of_lies | 1|none | 3|acc_norm |↑ |0.4880|± |0.0317|
+| - leaderboard_gpqa | N/A| | | | | | | |
+| - leaderboard_gpqa_diamond | 1|none | 0|acc_norm |↑ |0.2222|± |0.0296|
+| - leaderboard_gpqa_extended | 1|none | 0|acc_norm |↑ |0.2381|± |0.0182|
+| - leaderboard_gpqa_main | 1|none | 0|acc_norm |↑ |0.2478|± |0.0204|
+| - leaderboard_ifeval | 3|none | 0|inst_level_loose_acc |↑ |0.1859|± | N/A|
+| | |none | 0|inst_level_strict_acc |↑ |0.1811|± | N/A|
+| | |none | 0|prompt_level_loose_acc |↑ |0.0906|± |0.0124|
+| | |none | 0|prompt_level_strict_acc|↑ |0.0832|± |0.0119|
+| - leaderboard_math_hard | N/A| | | | | | | |
+| - leaderboard_math_algebra_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_counting_and_prob_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_geometry_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_intermediate_algebra_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_num_theory_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_prealgebra_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_math_precalculus_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
+| - leaderboard_mmlu_pro | 0.1|none | 5|acc |↑ |0.1100|± |0.0029|
+| - leaderboard_musr | N/A| | | | | | | |
+| - leaderboard_musr_murder_mysteries | 1|none | 0|acc_norm |↑ |0.5000|± |0.0317|
+| - leaderboard_musr_object_placements | 1|none | 0|acc_norm |↑ |0.2930|± |0.0285|
+| - leaderboard_musr_team_allocation | 1|none | 0|acc_norm |↑ |0.3720|± |0.0306|
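
These tables are lm-evaluation-harness output. A hedged sketch of reproducing the new `leaderboard` run through the harness's Python API follows; `lm_eval.simple_evaluate` and the `leaderboard` task group exist in harness v0.4+, but the exact options used for this run are not recorded in the diff, and the repository id is again a placeholder.

```python
# Sketch only: re-run the leaderboard task group with lm-evaluation-harness
# (v0.4+). "mtasic85/<repo-name>" is a placeholder repository id.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=mtasic85/<repo-name>,dtype=bfloat16",
    tasks=["leaderboard"],  # expands to bbh, gpqa, ifeval, math_hard, mmlu_pro, musr
    batch_size="auto",
)

# Per-task metrics (acc_norm, exact_match, ...) live under results["results"],
# mirroring the rows of the table above.
for task, metrics in sorted(results["results"].items()):
    print(task, metrics)
```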