pythia-400m-8epoch-q0f16 / mlc-chat-config.json
Uploaded by saum7800 · commit dc1e23d ("make LM")
{
  "model_lib": "pythia-400m-8epoch-q0f16",
  "local_id": "pythia-400m-8epoch-q0f16",
  "conv_template": "LM",
  "temperature": 0.7,
  "repetition_penalty": 1,
  "top_p": 0.95,
  "mean_gen_len": 128,
  "max_gen_len": 512,
  "max_window_size": 768,
  "num_shards": 1,
  "shift_fill_factor": 0.3,
  "tokenizer_files": [
    "tokenizer.json"
  ],
  "model_category": "gpt_neox",
  "model_name": "pythia-400m-8epoch",
  "vocab_size": 50432
}
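
For reference, a minimal sketch of how the generation parameters in this config might be read back in Python. It uses only the standard library; the file name and field names come from the config above, while anything about how MLC-LLM itself consumes the file is an assumption and is not shown here.

import json

# Load the MLC chat config shown above (path assumes it sits in the current directory).
with open("mlc-chat-config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# Sampling parameters applied at generation time.
temperature = config["temperature"]                 # 0.7: softens the token distribution
top_p = config["top_p"]                             # 0.95: nucleus-sampling cutoff
repetition_penalty = config["repetition_penalty"]   # 1: no repetition penalty applied

# Length and context limits for the pythia-400m-8epoch model.
max_gen_len = config["max_gen_len"]                 # at most 512 generated tokens
max_window_size = config["max_window_size"]         # 768-token context window

print(config["model_lib"], temperature, top_p, max_gen_len)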