{
  "bits": 4,
  "group_size": 128,
  "damp_percent": 0.005,
  "desc_act": false,
  "static_groups": false,
  "sym": false,
  "true_sequential": true,
  "model_name_or_path": "",
  "model_file_base_name": "model",
  "quant_method": "gptq",
  "checkpoint_format": "gptq",
  "meta": {
    "quantizer": "autogptq:0.8.0.dev1"
  }
}
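
For reference, a minimal sketch of how a checkpoint directory containing this quantize_config.json is typically loaded with the AutoGPTQ library (the quantizer named in the `meta` field). The model path is a placeholder, and the sketch assumes auto-gptq with its CUDA kernels is installed and that the weights are stored as `model.safetensors` (matching `"model_file_base_name": "model"`).

```python
# Minimal sketch: load a GPTQ checkpoint whose directory contains the
# quantize_config.json above. Assumes auto-gptq is installed and a CUDA
# device is available; the model directory path is hypothetical.
from auto_gptq import AutoGPTQForCausalLM
from transformers import AutoTokenizer

model_dir = "path/to/quantized-model"  # placeholder local path or repo id

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoGPTQForCausalLM.from_quantized(
    model_dir,
    device="cuda:0",
    use_safetensors=True,  # weights expected as model.safetensors
)

# Quick smoke test of the 4-bit, group-size-128 quantized model.
inputs = tokenizer("Hello", return_tensors="pt").to("cuda:0")
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=20)[0]))
```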