---
# mergekit recipe: linear (weighted-average) merge of two checkpoints.
# The two weights below sum to 1.0, so the merged tensors are a convex
# combination of the source models' parameters.
models:
  # NOTE(review): presumably a Gemma-2B-family checkpoint — its layer_range
  # matches gemma-2b-it below; confirm architecture compatibility before merging.
  - model: alfredplpl/suzume-poc
    # Covers the model's full transformer stack; assumes the end index is
    # exclusive per mergekit convention — TODO confirm against mergekit docs.
    layer_range: [0, 18]
    parameters:
      weight: 0.4  # 40% contribution to the weighted average
  - model: alpindale/gemma-2b-it
    layer_range: [0, 18]
    parameters:
      weight: 0.6  # 60% contribution to the weighted average
merge_method: linear  # element-wise weighted average of parameters
dtype: bfloat16  # precision used for the merged output tensors