---
# mergekit configuration: TIES merge of three 34B models.
# NOTE(review): per-parameter semantics below follow mergekit's TIES
# documentation — confirm against the mergekit version actually in use.
models:
  # Listed without parameters and repeated as base_model below; with
  # merge_method "ties" this model anchors the merge (the other models'
  # deltas relative to it are combined).
  - model: abacusai/Smaug-34B-v0.1
  - model: jondurbin/bagel-dpo-34b-v0.2
    parameters:
      density: 0.45  # fraction of this model's delta weights retained (rest pruned)
      weight: 0.5  # relative contribution of the retained deltas
  - model: abacusai/MetaMath-Bagel-DPO-34B
    parameters:
      density: 0.48  # slightly denser than bagel-dpo above
      weight: 0.5  # equal weighting with bagel-dpo above
merge_method: ties
base_model: abacusai/Smaug-34B-v0.1  # must match one of the models listed above
parameters:
  normalize: true  # rescale merged task vectors so weights sum to 1
  int8_mask: true  # presumably computes the sparsification mask in int8 to save memory — confirm
dtype: bfloat16  # dtype of the output merged tensors