base_model: davidkim205/Rhea-72b-v0.5
dtype: bfloat16
merge_method: slerp
parameters:
  t:
  - filter: self_attn
    value:
    - 0
    - 0.8
    - 0.8
    - 0.8
    - 1
  - filter: mlp
    value:
    - 1
    - 0.2
    - 0.2
    - 0.2
    - 0
  - value: 0.2
slices:
- sources:
  - layer_range:
    - 0
    - 80
    model: abacusai/Smaug-72B-v0.1
  - layer_range:
    - 0
    - 80
    model: davidkim205/Rhea-72b-v0.5
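This is a mergekit SLERP configuration: abacusai/Smaug-72B-v0.1 and davidkim205/Rhea-72b-v0.5 are interpolated across all 80 layers, with the self_attn and mlp tensors following complementary interpolation schedules and all remaining tensors blended at t = 0.2. Assuming the file is saved as config.yml, a merge like this is typically run with the mergekit CLI (the output directory name and flag choice below are illustrative, not taken from this repo):

    mergekit-yaml config.yml ./merged-model --cuda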