dtype: float16
merge_method: linear
slices:
- sources:
  - layer_range: [0, 2]
    model: my_dir2/checkpoint-3
    parameters:
      weight: 0.5
  - layer_range: [0, 2]
    model: trl-internal-testing/tiny-random-LlamaForCausalLM
    parameters:
      weight: 0.5
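
This config performs a linear merge: layers 0-1 of the two listed models are averaged with equal weights (0.5 each) and the result is saved in float16. A minimal sketch of applying it, assuming mergekit's Python API (MergeConfiguration, run_merge, MergeOptions); the file name merge_config.yaml and output path ./merged are hypothetical:

import yaml
from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load the YAML config shown above (hypothetical file name).
with open("merge_config.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the linear merge and write the merged model to ./merged (hypothetical path).
run_merge(
    merge_config,
    out_path="./merged",
    options=MergeOptions(cuda=False, copy_tokenizer=True),
)

The same config can also be run from the command line with mergekit's mergekit-yaml entry point, passing the config path and an output directory.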