---
# mergekit passthrough configuration: depth-upscale Mistral-7B-v0.3 by
# re-inserting a copy of its middle layers. Resulting layer stack:
#   layers 0-24, then a second copy of 8-24, then 24-32
# NOTE(review): assumes mergekit layer_range is half-open [start, end) —
# confirm against mergekit docs before relying on the exact layer count.
slices:
  # First slice: the original early/middle layers, unmodified.
  - sources:
      - model: mistralai/Mistral-7B-v0.3
        layer_range: [0, 24]
  - sources: # add middle layers with residuals scaled to zero
      - model: mistralai/Mistral-7B-v0.3
        layer_range: [8, 24]
        parameters:
          scale:
            # Zero the attention output projection (o_proj) and the MLP down
            # projection (down_proj) so these duplicated layers initially
            # contribute nothing to the residual stream; all other weights
            # keep their original scale (the catch-all value: 1.0).
            # Filters are matched in order — the first matching entry wins.
            - filter: o_proj
              value: 0.0
            - filter: down_proj
              value: 0.0
            - value: 1.0
  # Final slice: the remaining top layers, unmodified.
  - sources:
      - model: mistralai/Mistral-7B-v0.3
        layer_range: [24, 32]
# passthrough concatenates the slices verbatim rather than averaging weights.
merge_method: passthrough
dtype: bfloat16