{
"comp_cgenerate_active": false,
"comp_ctranslate_active": false,
"comp_cwhisper_active": false,
"comp_diffusers2_active": false,
"comp_ifw_active": false,
"comp_onediff_active": false,
"comp_step_caching_active": false,
"comp_torch_compile_active": false,
"comp_ws2t_active": false,
"comp_x-fast_active": false,
"prune_torch-structured_active": false,
"quant_aqlm_active": false,
"quant_awq_active": false,
"quant_gptq_active": false,
"quant_half_active": false,
"quant_hqq_active": false,
"quant_llm-int8_active": true,
"quant_quanto_active": false,
"quant_torch_dynamic_active": false,
"quant_torch_static_active": false,
"quant_llm-int8_compute_dtype": "bfloat16",
"quant_llm-int8_double_quant": false,
"quant_llm-int8_enable_fp32_cpu_offload": false,
"quant_llm-int8_has_fp16_weight": false,
"quant_llm-int8_quant_type": "fp4",
"quant_llm-int8_threshold": 6.0,
"quant_llm-int8_weight_bits": 8,
"max_batch_size": 1,
"device": "cuda",
"cache_dir": "/covalent/.cache/models/tmpz0jahjuk",
"task": "",
"save_load_fn": "bitsandbytes",
"save_load_fn_args": {
"weight_quantization_bits": "param.dtype"
},
"api_key": null
}