---
# mergekit configuration — depth-upscaling (passthrough) merge of
# Qwen/Qwen2.5-7B-Instruct.
# Layer slices taken from the donor model and stacked in order.
# NOTE(review): ranges [0, 14] and [10, 14] overlap, so layers 10-13 appear
# twice in the output model — this is the usual depth-upscaling pattern for
# passthrough merges, but confirm the duplication is deliberate.
# layer_range is half-open per mergekit convention: [start, end) — verify
# against the mergekit version in use.
slices:
- sources:
  - layer_range: [0, 14]
    model: Qwen/Qwen2.5-7B-Instruct
- sources:
  # repeated block of layers 10-13 to deepen the network
  - layer_range: [10, 14]
    model: Qwen/Qwen2.5-7B-Instruct
- sources:
  # remaining layers 14-27 (Qwen2.5-7B-Instruct has 28 decoder layers,
  # presumably — TODO confirm against the model config)
  - layer_range: [14, 28]
    model: Qwen/Qwen2.5-7B-Instruct
# passthrough: copy slice weights directly; no averaging or interpolation
merge_method: passthrough
# precision the merged weights are written in
dtype: bfloat16
# take the tokenizer from the donor model unchanged
tokenizer_source: "Qwen/Qwen2.5-7B-Instruct"