QwenSlerp7-14B / mergekit_config.yml
hotmailuser's picture
Upload folder using huggingface_hub
6ae58b2 verified
raw
history blame
276 Bytes
# mergekit SLERP merge config: spherically interpolates
# allknowingroger/QwenSlerp6-14B (the base model) with djuna/Q2.5-Veltha-14B,
# with a per-layer interpolation weight given by `t` (t=0 -> base model,
# t=1 -> the other model).
models:
- model: allknowingroger/QwenSlerp6-14B
- model: djuna/Q2.5-Veltha-14B
merge_method: slerp
base_model: allknowingroger/QwenSlerp6-14B
dtype: bfloat16
parameters:
t: [0, 0.5, 1, 0.5, 0] # V-shaped curve: QwenSlerp6 (base) for input & output layers, Q2.5-Veltha in the middle layers