# Hermes-low-tune-3 / mergekit_config.yml
# Provenance (Hugging Face repo page): uploaded by nlpguy via huggingface_hub,
# commit bcfecde, 284 bytes (raw / history / blame view).
---
# mergekit SLERP merge: interpolate DPOpenHermes-7B-v2 with Hermes-low-tune-2.
base_model: openaccess-ai-collective/DPOpenHermes-7B-v2
dtype: bfloat16
merge_method: slerp
parameters:
  # Interpolation factor: 0.0 = pure base model, 1.0 = pure second model.
  t:
    - value: 0.25
slices:
  # Merge all 32 transformer layers of both 7B source models.
  - sources:
      - layer_range: [0, 32]
        model: openaccess-ai-collective/DPOpenHermes-7B-v2
      - layer_range: [0, 32]
        model: nlpguy/Hermes-low-tune-2