default_stage:
  default_modifiers:
    AWQModifier:
      # Quantize every Linear layer except the output head
      targets: [Linear]
      ignore: [lm_head]
      # W4A16: 4-bit weights, 16-bit activations
      scheme: W4A16
      # Each mapping pairs a "smooth" layer (whose output is rescaled)
      # with the downstream layers that absorb the inverse scales
      mappings:
        - smooth_layer: re:.*input_layernorm$
          balance_layers: ['re:.*q_proj$', 're:.*k_proj$', 're:.*v_proj$']
        - smooth_layer: re:.*v_proj$
          balance_layers: ['re:.*o_proj$']
        - smooth_layer: re:.*post_attention_layernorm$
          balance_layers: ['re:.*gate_proj$', 're:.*up_proj$']
        - smooth_layer: re:.*up_proj$
          balance_layers: ['re:.*down_proj$']
      # Search scales using both activation and weight magnitudes
      duo_scaling: true
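For context, a recipe like this is typically consumed by llm-compressor's one-shot calibration flow. The sketch below is a minimal illustration, not taken from this repository; the model name, dataset choice, and calibration settings are placeholder assumptions.

```python
# Minimal sketch: apply the AWQ recipe above via llm-compressor's
# oneshot entry point (assumes a recent llm-compressor release).
# Model name, dataset, and sample counts are placeholders.
from llmcompressor import oneshot

oneshot(
    model="meta-llama/Llama-3.1-8B-Instruct",  # placeholder model
    dataset="open_platypus",                   # placeholder calibration set
    recipe="recipe.yaml",                      # the recipe shown above
    output_dir="Llama-3.1-8B-Instruct-awq-w4a16",
    max_seq_length=2048,
    num_calibration_samples=256,
)
```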