quant_stage:
  quant_modifiers:
    AWQModifier:
      config_groups:
        group_0:
          targets: [Linear]
          weights:
            num_bits: 8
            type: int
            symmetric: true
            group_size: 32
            strategy: group
            block_structure: null
            dynamic: false
            actorder: null
            observer: mse
            observer_kwargs: {}
          input_activations: null
          output_activations: null
          format: null
      targets: [Linear]
      ignore: [language_model.lm_head, language_model.model.embed_tokens, 're:.*input_layernorm',
        're:.*post_attention_layernorm', language_model.model.norm, 're:vision_model.*', 're:mlp1.*']
      mappings:
      - smooth_layer: re:.*input_layernorm$
        balance_layers: ['re:.*q_proj$', 're:.*k_proj$', 're:.*v_proj$']
      - smooth_layer: re:.*v_proj$
        balance_layers: ['re:.*o_proj$']
      - smooth_layer: re:.*post_attention_layernorm$
        balance_layers: ['re:.*gate_proj$', 're:.*up_proj$']
      - smooth_layer: re:.*up_proj$
        balance_layers: ['re:.*down_proj$']
      duo_scaling: true
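
# For context: a recipe like this is normally applied through llm-compressor's
# oneshot entrypoint rather than read by hand. The sketch below is illustrative
# only, not part of this repo; the model id, dataset name, and calibration
# settings are assumptions (the ignore list's language_model/vision_model/mlp1
# entries suggest an InternVL-style vision-language model whose vision tower
# and projector are kept in full precision).
```python
from llmcompressor import oneshot

oneshot(
    model="OpenGVLab/InternVL3-8B",   # hypothetical placeholder model id
    dataset="open_platypus",          # example calibration dataset, an assumption
    recipe="recipe.yaml",             # this file
    max_seq_length=2048,              # illustrative calibration settings
    num_calibration_samples=256,
)
```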