DEFAULT_stage:
  DEFAULT_modifiers:
    QuantizationModifier:
      ignore: [lm_head, 're:.*block_sparse_moe.gate']
      targets: Linear
      scheme: FP8
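
This recipe quantizes all Linear layers to FP8 while leaving the lm_head and the MoE router gates (the block_sparse_moe.gate regex, matching Mixtral-style models) unquantized. Below is a minimal, hypothetical sketch of how such a recipe is typically applied with llm-compressor's oneshot entrypoint; it is not part of this repo, the model id, calibration dataset, sample count, and output path are placeholders, and the exact import path may differ across llm-compressor versions.

from llmcompressor.transformers import oneshot

# Static FP8 activation scales are usually calibrated on a small sample set.
oneshot(
    model="mistralai/Mixtral-8x7B-Instruct-v0.1",  # placeholder: any model with block_sparse_moe gates
    dataset="open_platypus",                       # placeholder calibration dataset
    recipe="recipe.yaml",                          # the recipe shown above
    output_dir="Mixtral-8x7B-Instruct-v0.1-FP8",   # placeholder output path
    num_calibration_samples=512,
)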