wasm-glaive-coder-7b-q4f32_1/params/mlc-chat-config.json
{
  "model_lib": "glaive-coder-7b-q4f32_1",
  "local_id": "glaive-coder-7b-q4f32_1",
  "conv_template": "llama-2",
  "conv_config": {
    "system": "<|im_start|>system: You are an AI assistant that follows instructions extremely well. Help as much as you can.",
    "roles": [
      "<|im_start|>user",
      "<|im_start|>assistant"
    ],
    "seps": [
      "<|im_end|>\n"
    ],
    "stop_str": "<|im_end|>"
  },
  "temperature": 0.7,
  "repetition_penalty": 1.0,
  "top_p": 0.95,
  "mean_gen_len": 128,
  "max_gen_len": 512,
  "max_window_size": 2048,
  "num_shards": 1,
  "shift_fill_factor": 0.3,
  "tokenizer_files": [
    "added_tokens.json",
    "tokenizer.model"
  ],
  "model_category": "llama",
  "model_name": "glaive-coder-7b",
  "vocab_size": 32017
}
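The conv_config block above overrides the base "llama-2" conversation template with ChatML-style markers: a system string, two role prefixes, a turn separator, and a stop string. The sketch below is only an illustration of how those fields fit together into a prompt; the file name, the build_prompt helper, and the colon-space joining of role and message are assumptions, since the exact assembly is performed by MLC's own conversation code at runtime.

import json

# Load the chat config shown above (the path is assumed for illustration).
with open("mlc-chat-config.json") as f:
    cfg = json.load(f)

conv = cfg["conv_config"]

def build_prompt(user_message: str) -> str:
    """Assemble a ChatML-style prompt from the conv_config fields.

    Hypothetical helper: it mirrors how system/roles/seps/stop_str relate,
    not the exact logic of MLC's llama-2 template.
    """
    sep = conv["seps"][0]                  # "<|im_end|>\n"
    user_role, assistant_role = conv["roles"]
    parts = [
        conv["system"] + sep,                   # system turn
        f"{user_role}: {user_message}" + sep,   # user turn
        f"{assistant_role}:",                   # the model continues from here
    ]
    return "".join(parts)

print(build_prompt("Write a Python function that reverses a string."))
# Generation stops when the model emits conv["stop_str"] ("<|im_end|>").

The sampling fields (temperature 0.7, top_p 0.95, repetition_penalty 1.0) and the length limits (max_gen_len 512, max_window_size 2048) are read by the runtime alongside this conversation setup, so adjusting them here changes decoding behavior without rebuilding the model library.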