DEFAULT_stage:
  DEFAULT_modifiers:
    QuantizationModifier:
      # Modules left in their original precision (regex patterns): the LM
      # head, attention blocks, MoE routers, the vision tower and projector,
      # and the shared-expert / dense feed-forward projections.
      ignore: ['re:.*lm_head', 're:.*self_attn', 're:.*router',
               're:.*vision_model', 're:.*multi_modal_projector',
               're:.*shared_expert', 're:.*feed_forward.gate_proj',
               're:.*feed_forward.up_proj', 're:.*feed_forward.down_proj']
      # Quantize all remaining Linear layers to FP8, with per-channel weight
      # scales and dynamic per-token activation scales.
      targets: [Linear]
      scheme: FP8_DYNAMIC
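
# --- Usage sketch (not part of the recipe) ---
# A minimal way to apply this recipe with llm-compressor, assuming the
# top-level `oneshot` export; the model id and output path below are
# hypothetical placeholders. Because FP8_DYNAMIC computes activation scales
# at runtime, no calibration dataset is needed:
#
#   from llmcompressor import oneshot
#
#   oneshot(
#       model="org/base-model",           # hypothetical model to quantize
#       recipe="recipe.yaml",             # this file
#       output_dir="base-model-FP8-dynamic",
#   )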