# mergekit passthrough (layer-stacking) config: depth-upscales Qwen/Qwen2-7B
# from 28 to 44 layers by inserting a second copy of layers 6-22.
dtype: bfloat16
merge_method: passthrough
slices:
- sources:
  - layer_range: [0, 22]
    model: Qwen/Qwen2-7B
- sources:
  # Duplicated block: layers 6-22 repeated. Scaling o_proj and down_proj to
  # 0.0 zeroes the attention and MLP contributions of each copied layer, so
  # the duplicated block acts as an identity map over the residual stream and
  # the expanded model starts out functionally equivalent to the base model.
  - layer_range: [6, 22]
    model: Qwen/Qwen2-7B
    parameters:
      scale:
      - filter: o_proj
        value: 0.0
      - filter: down_proj
        value: 0.0
      - value: 1.0    # all other tensors keep their original scale
- sources:
  - layer_range: [22, 28]
    model: Qwen/Qwen2-7B
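
# A minimal sketch of running this merge via mergekit's Python interface,
# assuming this file is saved as config.yaml and mergekit is installed; the
# file name and output directory below are placeholders, and the CLI
# equivalent would be `mergekit-yaml config.yaml ./qwen2-upscaled`:
#
#   import yaml
#   from mergekit.config import MergeConfiguration
#   from mergekit.merge import MergeOptions, run_merge
#
#   # Load and validate this YAML config.
#   with open("config.yaml", "r", encoding="utf-8") as f:
#       merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))
#
#   # Execute the passthrough merge and write the expanded model to disk.
#   run_merge(
#       merge_config,
#       out_path="./qwen2-upscaled",
#       options=MergeOptions(copy_tokenizer=True, lazy_unpickle=True),
#   )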