{
"_name_or_path": "/scratch/1/user/sclincha/cocom_release/cocom-v1-128-mistral-7b",
"architectures": [
"COCOM"
],
"auto_map": {
"AutoConfig": "modeling_cocom.COCOMConfig",
"AutoModel": "modeling_cocom.COCOM",
"AutoModelForCausalLM": "modeling_cocom.COCOM"
},
"compr_linear_type": "concat",
"compr_model_name": null,
"compr_rate": 128,
"decoder_model_name": "mistralai/Mistral-7B-Instruct-v0.2",
"generation_top_k": 5,
"lora": true,
"lora_r": 16,
"max_new_tokens": 128,
"model_type": "COCOM",
"quantization": "no",
"sep": true,
"torch_dtype": "bfloat16",
"training_form": "both",
"transformers_version": "4.45.2"
}