slices:
  - sources:
      - model: "../2xbagel-dpo-34b-v0.2"
        layer_range: [0, 100]
      - model: "../2xNous-Capybara-34B"
        layer_range: [0, 100]
merge_method: slerp
base_model: "../2xbagel-dpo-34b-v0.2"
parameters:
  t:
    - filter: lm_head
      value: [0.55]
    - filter: embed_tokens
      value: [0.7]
    - filter: self_attn
      value: [0.65, 0.35]
    - filter: mlp
      value: [0.35, 0.65]
    - filter: layernorm
      value: [0.4, 0.6]
    - filter: modelnorm
      value: [0.6]
    - value: 0.5 # fallback for rest of tensors
dtype: float16
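
# How to read the t values above, as a minimal sketch assuming mergekit's
# documented semantics: merge_method: slerp spherically interpolates each
# tensor between the base model (t = 0) and the second source (t = 1), and a
# multi-element t list such as [0.65, 0.35] is a gradient interpolated across
# layer depth, so self_attn tensors start near t = 0.65 in the first layer and
# end near t = 0.35 in the last. The NumPy code below is an illustrative
# stand-in, not mergekit's actual implementation; slerp, gradient_t, and
# layer_frac are hypothetical names.

import numpy as np

def slerp(t: float, a: np.ndarray, b: np.ndarray, eps: float = 1e-8) -> np.ndarray:
    """Spherical linear interpolation between two weight tensors."""
    a_n = a.ravel() / (np.linalg.norm(a) + eps)
    b_n = b.ravel() / (np.linalg.norm(b) + eps)
    dot = np.clip(np.dot(a_n, b_n), -1.0, 1.0)
    omega = np.arccos(dot)            # angle between the two weight vectors
    if np.sin(omega) < eps:           # nearly colinear: fall back to lerp
        return (1.0 - t) * a + t * b
    so = np.sin(omega)
    return (np.sin((1.0 - t) * omega) / so) * a + (np.sin(t * omega) / so) * b

def gradient_t(values: list[float], layer_frac: float) -> float:
    """Interpolate a gradient list (e.g. [0.65, 0.35]) over layer depth in [0, 1]."""
    xp = np.linspace(0.0, 1.0, num=len(values))
    return float(np.interp(layer_frac, xp, values))

# self_attn: t runs from 0.65 (first layer) to 0.35 (last layer)
print(gradient_t([0.65, 0.35], 0.0))  # -> 0.65
print(gradient_t([0.65, 0.35], 1.0))  # -> 0.35
# single-element lists like [0.55] stay constant across all layers
print(gradient_t([0.55], 0.5))        # -> 0.55

# In practice this config file would typically be passed to mergekit's
# mergekit-yaml CLI (e.g. mergekit-yaml config.yml ./merged-model) to produce
# the merged checkpoint in float16.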