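# llm-compressor AWQ recipe: quantizes all Linear layers to the W4A16 scheme
# (4-bit weights, 16-bit activations), skipping the lm_head, the MoE router
# gates, the shared-expert gates, and the vision tower. Each mapping pairs a
# smoothing layer (whose output activations are rescaled) with the downstream
# balance layers whose weights absorb the compensating scales.
# Note: the comment on duo_scaling below reflects the AWQModifier's documented
# default behavior, not anything stated in this repository: with
# duo_scaling: false, the scale search uses activation statistics only rather
# than a joint activation/weight term.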
default_stage:
  default_modifiers:
    AWQModifier:
      targets: [Linear]
      ignore: [lm_head, 're:.*mlp.gate$', 're:.*mlp.shared_expert_gate$', 're:visual.*']
      scheme: W4A16
      mappings:
      - smooth_layer: re:.*input_layernorm$
        balance_layers: ['re:.*q_proj$', 're:.*k_proj$', 're:.*v_proj$']
      - smooth_layer: re:.*v_proj$
        balance_layers: ['re:.*o_proj$']
      - smooth_layer: re:.*post_attention_layernorm$
        balance_layers: ['re:.*mlp.experts.*.gate_proj$', 're:.*mlp.experts.*.up_proj$']
      - smooth_layer: re:.*up_proj$
        balance_layers: ['re:.*down_proj$']
      duo_scaling: false
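
# A minimal application sketch, assuming llm-compressor's `oneshot` entrypoint;
# the model id, calibration dataset, and calibration settings below are
# placeholders, not values taken from this repository:
#
#   from llmcompressor import oneshot
#
#   oneshot(
#       model="<base-model-id>",           # placeholder model identifier
#       recipe="recipe.yaml",              # this file
#       dataset="<calibration-dataset>",   # placeholder calibration set
#       max_seq_length=2048,
#       num_calibration_samples=512,
#   )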