{
"model_name": "resources/models/models/NousResearch_Meta-Llama-3-70B-Instruct",
"quant_config": {
"wbits": 4,
"abits": 16,
"group_size": 128,
"symmetric": true
},
"start_sample": 0,
"nsamples": 128,
"epochs": 10,
"aug_loss": true,
"eval_ppl": true,
"real_quant": true,
"lwc_lr": 0.01,
"use_lr_scheduler": false,
"cache_dir": "resources/cache",
"output_dir": "resources/models/models/NousResearch_Meta-Llama-3-70B-Instruct_omniquant_symm/logs",
"save_dir": "resources/models/models/NousResearch_Meta-Llama-3-70B-Instruct_omniquant_symm/NousResearch_Meta-Llama-3-70B-Instruct",
"config_class": "OmniquantConfig"
}