DEFAULT_stage:
  DEFAULT_modifiers:
    QuantizationModifier:
      ignore: [lm_head, 're:.*mlp.gate$']
      targets: Linear
      scheme: FP8