# mergekit configuration: DARE linear merge of zephyr-7b-alpha and
# dolphin-2.6-mistral-7b-dpo-laser onto the Mistral-7B-v0.1 base model.
slices:
  - sources:
      # Base model: the other models' deltas are computed relative to it.
      - model: mistralai/Mistral-7B-v0.1
        layer_range: [0, 32]
      - model: HuggingFaceH4/zephyr-7b-alpha
        layer_range: [0, 32]
        parameters:
          density: 0.53  # fraction of delta parameters retained after random dropping
          weight: 0.4    # relative weight of this model in the linear combination
      - model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo-laser
        layer_range: [0, 32]
        parameters:
          density: 0.53
          weight: 0.4
merge_method: dare_linear
base_model: mistralai/Mistral-7B-v0.1
parameters:
  int8_mask: true  # store intermediate masks as int8 to reduce memory use
dtype: bfloat16    # dtype used for the merged weights
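
# Minimal usage sketch, kept as comments so this file stays valid YAML.
# Assumptions (not part of the original config): mergekit is installed and this
# file is saved locally as config.yml; the output directory name is arbitrary.
#
#   mergekit-yaml config.yml ./merged-model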