File size: 424 Bytes
1b8f242
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
---
# mergekit configuration: DARE-TIES merge of Starling-LM-7B-alpha into
# openchat-3.5-1210 (both Mistral-7B derivatives), across all 32 layers.
base_model: /data/home/wangyongxin/model/mistral/7b/merged/openchat-3.5-1210
dtype: bfloat16
merge_method: dare_ties
parameters:
  # Store the delta sign mask as int8 to reduce memory use during the merge
  # (mergekit treats this as a truthy flag; 1.0 == enabled).
  int8_mask: 1.0
slices:
  - sources:
      # Base model contributes all 32 layers; no density/weight needed here —
      # the base is the reference the donor's deltas are applied to.
      - layer_range: [0, 32]
        model: /data/home/wangyongxin/model/mistral/7b/merged/openchat-3.5-1210
      # Donor model: DARE randomly drops (1 - density) of its delta weights
      # and rescales the survivors; the result is blended in at `weight`.
      - layer_range: [0, 32]
        model: /data/home/wangyongxin/model/mistral/7b/merged/Starling-LM-7B-alpha
        parameters:
          density: 0.62
          weight: 0.14