{
  "_name_or_path": "/scratch/1/user/sclincha/cocom_release/cocom-v1-16-mistral-7b/",
  "architectures": [
    "COCOM"
  ],
  "auto_map": {
    "AutoConfig": "modeling_cocom.COCOMConfig",
    "AutoModel": "modeling_cocom.COCOM",
    "AutoModelForCausalLM": "modeling_cocom.COCOM"
  },
  "compr_linear_type": "concat",
  "compr_model_name": null,
  "compr_rate": 16,
  "decoder_model_name": "mistralai/Mistral-7B-Instruct-v0.2",
  "generation_top_k": 5,
  "lora": true,
  "lora_r": 16,
  "max_new_tokens": 128,
  "model_type": "COCOM",
  "quantization": "no",
  "sep": true,
  "torch_dtype": "bfloat16",
  "training_form": "both",
  "transformers_version": "4.45.2"
}