djstrong committed on
Commit
f2a3e70
β€’
1 Parent(s): de1d88f
Files changed (2) hide show
  1. app.py +24 -3
  2. metadata.json +316 -0
app.py CHANGED
@@ -1,3 +1,4 @@
 
1
  import re
2
 
3
  import gradio as gr
@@ -25,12 +26,17 @@ with demo:
25
  header = [h.strip() for h in header]
26
  for i, line in enumerate(f):
27
  leaderboard_df.append(line.strip().split(",", 13))
 
 
 
 
 
28
  # create dataframe from list and header
29
  leaderboard_df = pd.DataFrame(leaderboard_df, columns=header)
30
  # filter column with value eq-bench_v2_pl
31
  print(header)
32
  leaderboard_df = leaderboard_df[(leaderboard_df["Benchmark Version"] == "eq-bench_v2_pl") | (
33
- leaderboard_df["Benchmark Version"] == 'eq-bench_pl')]
34
  # fix: ValueError: The truth value of a Series is ambiguous. Use a.empty, a.bool(), a.item(), a.any() or a.all().
35
 
36
  # leave only defined columns
@@ -48,6 +54,18 @@ with demo:
48
  leaderboard_df["Num Questions Parseable"] = leaderboard_df[["Num Questions Parseable", "Error"]].apply(
49
  lambda x: parse_parseable(x), axis=1)
50
 
 
 
 
 
 
 
 
 
 
 
 
 
51
  leaderboard_df["Model Path"] = leaderboard_df["Model Path"].apply(lambda x: make_clickable_model(x))
52
 
53
  # change value of column to nan
@@ -64,21 +82,24 @@ with demo:
64
  leaderboard_df = leaderboard_df.sort_values(by=["Benchmark Score", "Num Questions Parseable"],
65
  ascending=[False, False])
66
 
67
- #rename column
68
  leaderboard_df = leaderboard_df.rename(columns={"Model Path": "Model"})
69
 
70
  leaderboard_df_styled = leaderboard_df.style.background_gradient(cmap="RdYlGn")
 
 
71
  rounding = {}
72
  # for col in ["Benchmark Score", "Num Questions Parseable"]:
73
 
74
  rounding["Benchmark Score"] = "{:.2f}"
75
  rounding["Num Questions Parseable"] = "{:.0f}"
 
76
  leaderboard_df_styled = leaderboard_df_styled.format(rounding)
77
 
78
  leaderboard_table = gr.components.Dataframe(
79
  value=leaderboard_df_styled,
80
  # headers=[c.name for c in fields(AutoEvalColumn) if c.never_hidden] + shown_columns.value,
81
- datatype=['markdown', 'number', 'number', 'str'],
82
  elem_id="leaderboard-table",
83
  interactive=False,
84
  visible=True,
 
1
+ import json
2
  import re
3
 
4
  import gradio as gr
 
26
  header = [h.strip() for h in header]
27
  for i, line in enumerate(f):
28
  leaderboard_df.append(line.strip().split(",", 13))
29
+
30
+ metadata = json.load(open('metadata.json'))
31
+ for k, v in list(metadata.items()):
32
+ metadata[k.split(",")[0]] = v
33
+
34
  # create dataframe from list and header
35
  leaderboard_df = pd.DataFrame(leaderboard_df, columns=header)
36
  # filter column with value eq-bench_v2_pl
37
  print(header)
38
  leaderboard_df = leaderboard_df[(leaderboard_df["Benchmark Version"] == "eq-bench_v2_pl") | (
39
+ leaderboard_df["Benchmark Version"] == 'eq-bench_pl')]
40
  # fix: ValueError: The truth value of a Series is ambiguous. Use a.empty, a.bool(), a.item(), a.any() or a.all().
41
 
42
  # leave only defined columns
 
54
  leaderboard_df["Num Questions Parseable"] = leaderboard_df[["Num Questions Parseable", "Error"]].apply(
55
  lambda x: parse_parseable(x), axis=1)
56
 
57
+
58
+ def get_params(model_name):
59
+ if model_name in metadata:
60
+ return metadata[model_name]
61
+ return numpy.nan
62
+
63
+
64
+ leaderboard_df["Params"] = leaderboard_df["Model Path"].apply(lambda x: get_params(x))
65
+
66
+ # move column order
67
+ leaderboard_df = leaderboard_df[["Model Path", "Params", "Benchmark Score", "Num Questions Parseable", 'Error']]
68
+
69
  leaderboard_df["Model Path"] = leaderboard_df["Model Path"].apply(lambda x: make_clickable_model(x))
70
 
71
  # change value of column to nan
 
82
  leaderboard_df = leaderboard_df.sort_values(by=["Benchmark Score", "Num Questions Parseable"],
83
  ascending=[False, False])
84
 
85
+ # rename column
86
  leaderboard_df = leaderboard_df.rename(columns={"Model Path": "Model"})
87
 
88
  leaderboard_df_styled = leaderboard_df.style.background_gradient(cmap="RdYlGn")
89
+ leaderboard_df_styled = leaderboard_df_styled.background_gradient(cmap="RdYlGn_r", subset=['Params'])
90
+
91
  rounding = {}
92
  # for col in ["Benchmark Score", "Num Questions Parseable"]:
93
 
94
  rounding["Benchmark Score"] = "{:.2f}"
95
  rounding["Num Questions Parseable"] = "{:.0f}"
96
+ rounding["Params"] = "{:.0f}"
97
  leaderboard_df_styled = leaderboard_df_styled.format(rounding)
98
 
99
  leaderboard_table = gr.components.Dataframe(
100
  value=leaderboard_df_styled,
101
  # headers=[c.name for c in fields(AutoEvalColumn) if c.never_hidden] + shown_columns.value,
102
+ datatype=['markdown', 'number', 'number', 'number', 'str'],
103
  elem_id="leaderboard-table",
104
  interactive=False,
105
  visible=True,
metadata.json ADDED
@@ -0,0 +1,316 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "Azurro/APT3-1B-Base": 1,
3
+ "HuggingFaceH4/zephyr-7b-alpha": 7,
4
+ "Voicelab/trurl-2-13b-academic": 13,
5
+ "HuggingFaceH4/zephyr-7b-beta": 7,
6
+ "Voicelab/trurl-2-7b": 7,
7
+ "mistralai/Mistral-7B-v0.1": 7,
8
+ "mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-400/adapter_model": 7,
9
+ "mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-200/adapter_model": 7,
10
+ "mistralai/Mistral-7B-v0.1,load_in_8bit=True": 7,
11
+ "Nondzu/zephyr-speakleash-007-pl-8192-32-16-0.05": 7,
12
+ "openchat/openchat-3.5-0106": 7,
13
+ "mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-2000/adapter_model": 7,
14
+ "mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-2200/adapter_model": 7,
15
+ "mistralai/Mistral-7B-Instruct-v0.1": 7,
16
+ "APT3-1B-Instruct-e1": 1,
17
+ "mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-800/adapter_model": 7,
18
+ "mistralai/Mistral-7B-v0.1,peft=lora/output/mistral-7b-v0.1-lora-pl/checkpoint-600/adapter_model": 7,
19
+ "APT3-1B-Instruct-e2": 1,
20
+ "mistralai/Mistral-7B-v0.1,load_in_4bit=True": 7,
21
+ "speakleash/3-5B_high_base/epoch_2_hf": 3.5,
22
+ "speakleash/3-5B_high_base/epoch_1_hf": 3.5,
23
+ "speakleash/3-5B_high_base/epoch_0_hf": 3.5,
24
+ "speakleash/7B_high_base/epoch_1_hf": 7,
25
+ "speakleash/7B_high_base/epoch_0_hf": 7,
26
+ "Nondzu/zephyr-speakleash-010-pl-3072-32-16-0.01": 7,
27
+ "google/mt5-xl": 3.7,
28
+ "speakleash/7B_high_sft/epoch_2_base/epoch_2_hf": 7,
29
+ "OPI-PG/Qra-1b": 1,
30
+ "OPI-PG/Qra-13b": 13,
31
+ "OPI-PG/Qra-7b": 7,
32
+ "teknium/OpenHermes-2.5-Mistral-7B": 7,
33
+ "openchat/openchat-3.5-1210": 7,
34
+ "speakleash/apt3-1B_base/apt3-1B-sequential_hf": 1,
35
+ "speakleash/apt3-1B_base/apt3-1B-shuffled_hf": 1,
36
+ "speakleash/1B_high_base/like_apt3-1B_hf": 1,
37
+ "speakleash/1B_high_base/epoch_3_hf": 1,
38
+ "speakleash/7B_high_sft/epoch_1_base/epoch_2_hf": 7,
39
+ "speakleash/7B_high_sft/epoch_1_base/epoch_1_hf": 7,
40
+ "speakleash/7B_high_sft/epoch_0_base/epoch_0_hf": 7,
41
+ "speakleash/7B_high_sft/epoch_2_base/epoch_1_hf": 7,
42
+ "speakleash/3-5B_high_sft/epoch_3_base/epoch_2_hf": 3.5,
43
+ "allegro/plt5-large": 0.82,
44
+ "internlm/internlm2-7b": 7,
45
+ "sdadas/polish-gpt2-xl": 1.67,
46
+ "speakleash/1B_4k_high_sft/epoch_3_base/epoch_1_hf": 1,
47
+ "speakleash/mistral-PL_7B/epoch_0_hf": 7,
48
+ "speakleash/1B_high_sft/epoch_3_base/epoch_1_hf": 1,
49
+ "speakleash/polish-mistral-7B/epoch_0_hf": 7,
50
+ "speakleash/3-5B_high_sft/epoch_0_base/epoch_2_hf": 3.5,
51
+ "speakleash/3-5B_high_sft/epoch_0_base/epoch_1_hf": 3.5,
52
+ "speakleash/3-5B_high_sft/epoch_0_base/epoch_0_hf": 3.5,
53
+ "speakleash/7B_high_base/epoch_2_hf": 7,
54
+ "speakleash/10B-4k_high_sft/epoch_3_base/epoch_1_hf": 10,
55
+ "speakleash/3-5B_high_base/epoch_3_hf": 3.5,
56
+ "microsoft/phi-2": 2.7,
57
+ "RWKV/HF_v5-Eagle-7B": 7,
58
+ "mistralai/Mistral-7B-Instruct-v0.2": 7,
59
+ "speakleash/llama-apt3-7B/only-spi-e0_hf": 7,
60
+ "speakleash/llama-apt3-7B/spkl-only_sft/e4_hf": 7,
61
+ "speakleash/llama-apt3-7B/spkl-only_sft/e5_hf": 7,
62
+ "speakleash/llama-apt3-7B/spkl-only_sft/e3_hf": 7,
63
+ "speakleash/llama-apt3-7B/spkl-only_sft/e2_hf": 7,
64
+ "meta-llama/Llama-2-7b-hf": 7,
65
+ "meta-llama/Llama-2-7b-chat-hf": 7,
66
+ "internlm/internlm2-chat-7b": 7,
67
+ "internlm/internlm2-base-7b": 7,
68
+ "internlm/internlm2-1_8b": 1.8,
69
+ "internlm/internlm2-chat-1_8b": 1.8,
70
+ "speakleash/mistral-apt3-7B/only-spi_sft/e0_hf": 7,
71
+ "speakleash/mistral-apt3-7B/only-spi-e0_hf": 7,
72
+ "speakleash/mistral-apt3-7B/apt3-e0_hf": 7,
73
+ "speakleash/mistral-apt3-7B/spi-e0_hf": 7,
74
+ "speakleash/mistral-apt3-7B/spkl_sft_v2/e4_hf": 7,
75
+ "speakleash/mistral-apt3-7B/spkl_sft_v2/e5_hf": 7,
76
+ "speakleash/mistral-apt3-7B/spkl_sft_v2/e3_hf": 7,
77
+ "speakleash/mistral-apt3-7B/spkl_sft_v2/e2_hf": 7,
78
+ "speakleash/mistral-apt3-7B/only-spi_sft_v2/e4_bb62a5b8": 7,
79
+ "speakleash/mistral-apt3-7B/only-spi_sft_v2/e6_6b0aa8d6": 7,
80
+ "speakleash/mistral-apt3-7B/only-spi_sft_v2/e3_f8b5e568": 7,
81
+ "speakleash/mistral-apt3-7B/only-spi_sft_v2/e2_3b7fc53e": 7,
82
+ "speakleash/mistral-apt3-7B/only-spi_sft_v2/e5_f75cbc76": 7,
83
+ "speakleash/mistral-apt3-7B/only-spi_sft_v2/e7_642f3822": 7,
84
+ "speakleash/mistral-apt3-7B/spkl_sft/e3_17ef3119": 7,
85
+ "speakleash/mistral-apt3-7B/spkl_sft/e2_7dc8df86": 7,
86
+ "google/gemma-7b": 7,
87
+ "google/gemma-7b-it": 7,
88
+ "SOTA FT HerBERT (large)": 1,
89
+ "Baseline (majority class)": 0,
90
+ "SOTA FT Polish RoBERTa": 1,
91
+ "SOTA FT ULMFiT-SP-PL": 0.1,
92
+ "speakleash/llama-apt3-13B/spkl-plus/e0_caa5ad79": 13,
93
+ "speakleash/llama-apt3-13B/spkl-only/e0_cc0931c5": 13,
94
+ "eryk-mazus/polka-1.1b": 1.1,
95
+ "berkeley-nest/Starling-LM-7B-alpha": 7,
96
+ "Remek/OpenChat3.5-0106-Spichlerz-Inst-001": 7,
97
+ "speakleash/mistral_7B-v2/spkl-all-e2_5bd6027d": 7,
98
+ "speakleash/mistral_7B-v2/spkl-all-e0_8cf0987d": 7,
99
+ "speakleash/mistral_7B-v2/spkl-only-e0_ef715d74": 7,
100
+ "speakleash/mistral_7B-v2/spkl-only-e1_333887a5": 7,
101
+ "speakleash/mistral_7B-v2/spkl-all-e1_0b514ce9": 7,
102
+ "speakleash/mistral_7B-v2/spkl-only-e2_5dac700d": 7,
103
+ "speakleash/llama-apt3-13B/spkl-only_e0_sft/ext_e3_23b6bc9b": 13,
104
+ "speakleash/llama-apt3-13B/spkl-only_e0_sft/spkl_e4_e3a666b1": 13,
105
+ "speakleash/llama-apt3-13B/spkl-only_e0_sft/spkl_e3_45ef6b63": 13,
106
+ "speakleash/llama-apt3-13B/spkl-only_e0_sft/spkl_e5_bf95416b": 13,
107
+ "speakleash/llama-apt3-13B/spkl-only_e0_sft/ext_e2_f7606252": 13,
108
+ "speakleash/llama-apt3-13B/spkl-only_e0_sft/spkl_e2_898ae6c6": 13,
109
+ "speakleash/apt4-1B/spkl-only-e3_756856c4": 1,
110
+ "speakleash/apt4-1B/spkl-all-e0_7f6a991e": 1,
111
+ "speakleash/apt4-1B/spkl-only-e2_969e76b4": 1,
112
+ "speakleash/apt4-1B/spkl-all-e2_bfb44ded": 1,
113
+ "speakleash/apt4-1B/spkl-all-e3_063753f9": 1,
114
+ "speakleash/apt4-1B/spkl-all-e1_74a293c8": 1,
115
+ "speakleash/apt4-1B/spkl-only-e0_b9c8bb39": 1,
116
+ "speakleash/apt4-1B/spkl-only-e1_fea4b41b": 1,
117
+ "upstage/SOLAR-10.7B-Instruct-v1.0": 10.7,
118
+ "upstage/SOLAR-10.7B-v1.0": 10.7,
119
+ "speakleash/mistral_7B-v2/spkl-all_sft/e1_base/spkl-all-e1_9aee511a": 7,
120
+ "speakleash/mistral_7B-v2/spkl-all_sft/e1_base/spkl-all-e0_dd9d2777": 7,
121
+ "speakleash/mistral_7B-v2/spkl-only_sft/e1_base/spkl-only-e1_d0ac34b7": 7,
122
+ "speakleash/mistral_7B-v2/spkl-only_sft/e1_base/spkl-only-e0_9eea5944": 7,
123
+ "Remek/Kruk-7B-SP-001": 7,
124
+ "TinyLlama/TinyLlama-1.1B-Chat-v1.0": 1.1,
125
+ "internlm/internlm2-chat-7b-sft": 7,
126
+ "speakleash/mistral_7B-v2/spkl-all_sft/e1_base/spkl-all-e3_72a6c52a": 7,
127
+ "speakleash/mistral_7B-v2/spkl-only_sft/e1_base/spkl-only-e3_08a0fd89": 7,
128
+ "speakleash/mistral_7B-v2/spkl-all_sft/e1_base/spkl-all-e2_0a1a62c0": 7,
129
+ "speakleash/mistral_7B-v2/spkl-only_sft/e1_base/spkl-only-e2_a7c66ac5": 7,
130
+ "speakleash/mistral-apt3-7B_v2/spkl-only_2e5-e0_116fa2bc": 7,
131
+ "speakleash/mistral-apt3-7B_v2/spkl-only_7e6-e0_8544bbd3": 7,
132
+ "speakleash/mistral-apt3-7B_v2/spkl-only_2e5-e1_013bd434": 7,
133
+ "speakleash/mistral-apt3-7B_v2/spkl-only-e1_87bfffac": 7,
134
+ "speakleash/mistral-apt3-7B_v2/spkl-only-e2_939d897f": 7,
135
+ "speakleash/mistral-apt3-7B_v2/spkl-only-e0_2a5be0dc": 7,
136
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5/spkl-only-e1_0303962d": 7,
137
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5/spkl-only-e0_f4aaf490": 7,
138
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e0_base_2e5/spkl-only-e0_009b090e": 7,
139
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e0_base_2e5/spkl-only-e1_91aae327": 7,
140
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_7e6w-e1_14d52992": 7,
141
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_7e6w-e2_72422a32": 7,
142
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only-e2_dcb87efc": 7,
143
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_7e6-e2_04382c38": 7,
144
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_7e6-e3_860889b1": 7,
145
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_7e6w-e3_78cf3243": 7,
146
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_9e7-e0_27275908": 7,
147
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only-e0_d31a18b7": 7,
148
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_7e6-e0_c26126c8": 7,
149
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only-e3_a5833b75": 7,
150
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_7e6w-e0_6c834bf7": 7,
151
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_7e6-e1_87b7c12f": 7,
152
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_9e7-e2_5ce06dd2": 7,
153
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_9e7-e1_561ac4bb": 7,
154
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only-e1_392d55d9": 7,
155
+ "speakleash/mistral_7B-v2/spkl-all_sft_v2/e1_base/spkl-all_2e6-e2_db0cd739": 7,
156
+ "speakleash/mistral_7B-v2/spkl-all_sft_v2/e1_base/spkl-all_2e6-e3_4960543c": 7,
157
+ "speakleash/mistral_7B-v2/spkl-all_sft_v2/e1_base/spkl-all_2e6-e0_1b65c3ac": 7,
158
+ "speakleash/mistral_7B-v2/spkl-all_sft_v2/e1_base/spkl-all_2e6-e1_70c70cc6": 7,
159
+ "speakleash/mistral-apt3-7B-v2/spkl-only_sft/e1_base/spkl-only-e2_3a071212": 7,
160
+ "speakleash/mistral-apt3-7B-v2/spkl-only_sft/e1_base/spkl-only-e0_6dc2e217": 7,
161
+ "speakleash/mistral-apt3-7B-v2/spkl-only_sft/e1_base/spkl-only-e1_46610eb1": 7,
162
+ "speakleash/mistral-apt3-7B-v2/spkl-only_sft-weighted/e1_base/spkl-only-e0_e79dcb9f": 7,
163
+ "speakleash/mistral-apt3-7B-v2/spkl-only_sft-weighted/e1_base/spkl-only-e1_10a78140": 7,
164
+ "Remek/OpenChat3.5-0106-Spichlerz-Bocian": 7,
165
+ "alpindale/Mistral-7B-v0.2-hf": 7,
166
+ "Azurro/APT3-275M-Base": 0.3,
167
+ "szymonrucinski/Curie-7B-v1": 7,
168
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v4/e0_base/spkl-all-e0-lr5e5_a47a2047": 7,
169
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v4/e0_base/spkl-all-e1_1774eb92": 7,
170
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v4/e0_base/spkl-all-e0-lr2e6_71659188": 7,
171
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v4/e0_base/spkl-all-e0_35239ee5": 7,
172
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v4/e0_base/spkl-all-e2_5257da77": 7,
173
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v4/e0_base/spkl-all-e3_5ca4603b": 7,
174
+ "speakleash/mistral-apt3-7B/spkl-only_sft_v3/e0_base/spkl-only-e3_90666ab5": 7,
175
+ "speakleash/mistral-apt3-7B/spkl-only_sft_v3/e0_base/spkl-only-e1_4e524cad": 7,
176
+ "speakleash/mistral-apt3-7B/spkl-only_sft_v3/e0_base/spkl-only-e0_40cdde38": 7,
177
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v3/e0_base/spkl-all-e0_67274d1b": 7,
178
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v3/e0_base/spkl-all-e1_695e8b44": 7,
179
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v3/e0_base/spkl-all-e2_a9e6a2f0": 7,
180
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v3/e0_base/spkl-all-e3_2ff00c2b": 7,
181
+ "speakleash/mistral-apt3-7B/spkl_sft_v2/e1_4067e14e": 7,
182
+ "speakleash/mistral-apt3-7B/spkl_sft_v2/e0_6214300a": 7,
183
+ "speakleash/mistral-apt3-7B/only-spi_sft_v2/e1_596202b3": 7,
184
+ "speakleash/mistral-apt3-7B/only-spi_sft_v2/e0_c4ea165e": 7,
185
+ "speakleash/mistral-apt3-7B/spkl-only_sft_v4/e0_base/spkl-only-e0_c00001c4": 7,
186
+ "speakleash/mistral-apt3-7B/spkl-only_sft_v4/e0_base/spkl-only-e3_2bcd3961": 7,
187
+ "speakleash/mistral-apt3-7B/spkl-only_sft_v4/e0_base/spkl-only-e1_f2730438": 7,
188
+ "speakleash/mistral-apt3-7B/spkl-only_sft_v4/e0_base/spkl-only-e2_f39a22a2": 7,
189
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v3-lr2/e0_base/spkl-all-e0-lr6_376eb1d5": 7,
190
+ "speakleash/mistral-apt3-7B/spkl-all_sft_v3-lr2/e0_base/spkl-all-e0-lr5_54b6226f": 7,
191
+ "speakleash/mistral-apt3-7B/spkl-only_sft_v3/e0_base/spkl-only-e2_f036d0fd": 7,
192
+ "speakleash/mistral-apt3-7B_v2/spkl-only_7e5-e0_e143e6ce": 7,
193
+ "Nexusflow/Starling-LM-7B-beta": 7,
194
+ "RWKV/v5-Eagle-7B-HF": 7,
195
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e0_base_2e5/spkl-only-e2_afcfbe2d": 7,
196
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e0_base_2e5/spkl-only-e3_6908149d": 7,
197
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5/spkl-only-e2_d5a874b1": 7,
198
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5/spkl-only-e3_1be744af": 7,
199
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v6/spkl-only-e0_4efab00a": 7,
200
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v6/spkl-only-e1_1b706f85": 7,
201
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v6/spkl-only-e2_f86f7889": 7,
202
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v6/spkl-only-e3_13641875": 7,
203
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v7w/spkl-only-e0_1f5f4968": 7,
204
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v7w/spkl-only-e1_50de9812": 7,
205
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v7w/spkl-only-e2_dd38abb9": 7,
206
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v7w/spkl-only-e3_36236df3": 7,
207
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v8w/spkl-only-e0_e185fb84": 7,
208
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v8w/spkl-only-e1_fb5d327f": 7,
209
+ "speakleash/mistral-apt3-7B_v2/spkl-only_sft/e1_base_2e5_v8w/spkl-only-e2_dd71be08": 7,
210
+ "speakleash/mistral_7B-v2/spkl-all_sft_v2/e1_base/spkl-all_3e6_v8w-e0_d2d8a320": 7,
211
+ "speakleash/mistral_7B-v2/spkl-all_sft_v2/e1_base/spkl-all_3e6_v8w-e1_cd7c61a1": 7,
212
+ "speakleash/mistral_7B-v2/spkl-all_sft_v2/e1_base/spkl-all_v8wa_9e6-e0_32c27aa5": 7,
213
+ "speakleash/mistral_7B-v2/spkl-all_sft_v2/e1_base/spkl-all_v8wa_9e6-e1_518b38ca": 7,
214
+ "speakleash/mistral_7B-v2/spkl-all_sft_v2/e1_base/spkl-all_v8wa_9e6-e2_84fb05a1": 7,
215
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa_3e6-e0_2ba34bd9": 7,
216
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa_3e6-e1_35ecfaaa": 7,
217
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa_3e6-e2_920b5c3f": 7,
218
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa_7e6-e0_d137146f": 7,
219
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa_7e6-e1_5bddbd74": 7,
220
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa_7e6-e2_bbc67e89": 7,
221
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa_7e6-e2b_53f28c53": 7,
222
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa_7e6-e3_9931f988": 7,
223
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa_7e6-e4_0bc82b61": 7,
224
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v8wa_9e6-e0_8aa4a0ae": 7,
225
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v8wa_9e6-e1_57357d6c": 7,
226
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v8wa_9e6-e2_5eb84913": 7,
227
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v9wa_3e6-e0_ae5e354c": 7,
228
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v9wa_7e6-e0_724b2d41": 7,
229
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v9wa_7e6-e1_d962636b": 7,
230
+ "speakleash/Bielik-7B-v0.1": 7,
231
+ "NousResearch/Nous-Hermes-2-SOLAR-10.7B": 10.7,
232
+ "Qwen/Qwen1.5-7B-Chat": 7,
233
+ "THUDM/chatglm3-6b-base": 6,
234
+ "THUDM/chatglm3-6b": 6,
235
+ "TeeZee/Bielik-SOLAR-LIKE-10.7B-Instruct-v0.1": 10.7,
236
+ "google/gemma-1.1-2b-it": 2,
237
+ "meta-llama/Meta-Llama-3-8B-Instruct": 8,
238
+ "meta-llama/Meta-Llama-3-8B-Instruct,max_length=4096": 8,
239
+ "meta-llama/Meta-Llama-3-8B": 8,
240
+ "meta-llama/Meta-Llama-3-8B,max_length=4096": 8,
241
+ "microsoft/WizardLM-2-7B": 7,
242
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa4_9e6-e0_193ad881": 7,
243
+ "speakleash/mistral_7B-v2/spkl-only_sft_v2/e1_base/spkl-only_v10wa4_9e6-e1_f40e0808": 7,
244
+ "speakleash/Bielik-7B-Instruct-v0.1": 7,
245
+ "speakleash/mistral_7B-v3/spkl-only_sft_v0/e0_base/spkl-only_v11wa_9e6-e0_fe38d62e": 7,
246
+ "speakleash/mistral_7B-v3/spkl-only_sft_v0/e0_base/spkl-only_v11wa_9e6-e1_6f84698e": 7,
247
+ "speakleash/mistral_7B-v3/spkl-only_sft_v0/e0_base/spkl-only_v11wap_9e6-e0_5c6927dd": 7,
248
+ "speakleash/mistral_7B-v3/spkl-only_sft_v0/e0_base/spkl-only_v11wap_9e6-e1_1d6755a9": 7,
249
+ "speakleash/mistral_7B-v3/spkl-only_v0-e0_b93294c8": 7,
250
+ "speakleash/mistral_7B-v3/spkl-only_v2-e0_e5547fd5": 7,
251
+ "speakleash/Bielik-7B-Instruct-v0.1-GPTQ,autogptq=True": 7,
252
+ "speakleash/Bielik-7B-Instruct-v0.1,load_in_4bit=True": 7,
253
+ "speakleash/Test-v02-ep3": 7,
254
+ "speakleash/mistral_7B-v3/spkl-only_v2-e1.34500_a9c75816": 7,
255
+ "CohereForAI/c4ai-command-r-v01,max_length=4096": 35,
256
+ "Qwen/Qwen1.5-14B-Chat": 14,
257
+ "Remek/Llama-3-8B-Omnibus-1-PL-v01-INSTRUCT": 8,
258
+ "Remek/Llama-3-8B-Omnibus-1-PL-v01-INSTRUCT,max_length=4096": 8,
259
+ "internlm/internlm2-20b,max_length=4096": 20,
260
+ "internlm/internlm2-chat-20b,max_length=4096": 20,
261
+ "lex-hue/Delexa-7b": 7,
262
+ "lmsys/vicuna-13b-v1.5": 13,
263
+ "maciek-pioro/Mixtral-8x7B-v0.1-pl,max_length=4096": 46.7,
264
+ "mistralai/Mixtral-8x7B-Instruct-v0.1,max_length=4096": 46.7,
265
+ "mistralai/Mixtral-8x7B-v0.1,max_length=4096": 46.7,
266
+ "speakleash/Test-001-wiki": 7,
267
+ "speakleash/Test-002": 7,
268
+ "teknium/OpenHermes-13B": 13,
269
+ "meta-llama/Meta-Llama-3-70B-Instruct,max_length=4096": 70,
270
+ "meta-llama/Meta-Llama-3-70B,max_length=4096": 70,
271
+ "mistralai/Mixtral-8x22B-Instruct-v0.1,max_length=4096": 141,
272
+ "mistralai/Mixtral-8x22B-v0.1,max_length=4096": 141,
273
+ "Qwen/Qwen1.5-14B-Chat,max_length=4096": 14,
274
+ "Qwen/Qwen1.5-32B-Chat,max_length=4096": 32,
275
+ "Qwen/Qwen1.5-72B-Chat,max_length=4096": 72,
276
+ "Qwen/Qwen1.5-32B,max_length=4096": 32,
277
+ "Qwen/Qwen1.5-72B,max_length=4096": 72,
278
+ "Qwen/Qwen1.5-7B": 7,
279
+ "Qwen/Qwen2-0.5B-Instruct": 0.5,
280
+ "Qwen/Qwen2-0.5B": 0.5,
281
+ "Qwen/Qwen2-1.5B-Instruct": 1.5,
282
+ "Qwen/Qwen2-1.5B": 1.5,
283
+ "Qwen/Qwen2-7B-Instruct": 7,
284
+ "Qwen/Qwen2-7B": 7,
285
+ "model=gpt-3.5-turbo-instruct": 20,
286
+ "model=gpt-4-turbo-2024-04-09": 1000,
287
+ "01-ai/Yi-1.5-6B-Chat": 6,
288
+ "01-ai/Yi-1.5-6B": 6,
289
+ "01-ai/Yi-1.5-9B-Chat": 9,
290
+ "01-ai/Yi-1.5-9B": 9,
291
+ "CohereForAI/aya-23-35B,max_length=4096": 35,
292
+ "CohereForAI/aya-23-8B": 8,
293
+ "NousResearch/Hermes-2-Pro-Llama-3-8B": 8,
294
+ "NousResearch/Hermes-2-Theta-Llama-3-8B": 8,
295
+ "Remek/OpenChat-3.5-0106-PL-Omnibusv2": 7,
296
+ "mistralai/Mistral-7B-Instruct-v0.3": 7,
297
+ "mistralai/Mistral-7B-v0.3": 7,
298
+ "nvidia/Llama3-ChatQA-1.5-8B": 8,
299
+ "openchat/openchat-3.5-0106-gemma": 7,
300
+ "openchat/openchat-3.6-8b-20240522": 8,
301
+ "tiiuae/falcon-11B": 11,
302
+ "mlabonne/NeuralDaredevil-8B-abliterated": 8,
303
+ "01-ai/Yi-1.5-34B-Chat,max_length=4096": 34,
304
+ "Qwen/Qwen2-57B-A14B-Instruct,max_length=4096": 57,
305
+ "Qwen/Qwen2-72B-Instruct,max_length=4096": 72,
306
+ "Qwen/Qwen2-72B,max_length=4096": 72,
307
+ "THUDM/glm-4-9b-chat": 9,
308
+ "THUDM/glm-4-9b": 9,
309
+ "google/recurrentgemma-9b-it": 9,
310
+ "microsoft/Phi-3-medium-4k-instruct,max_length=4096": 14,
311
+ "microsoft/Phi-3-mini-4k-instruct": 3.8,
312
+ "microsoft/Phi-3-small-8k-instruct": 7.4,
313
+ "ssmits/Falcon2-5.5B-Polish": 5.5,
314
+ "alpindale/WizardLM-2-8x22B,max_length=4096": 141,
315
+ "dreamgen/WizardLM-2-7B": 7
316
+ }