base_model: teknium/OpenHermes-2.5-Mistral-7B
dtype: bfloat16
merge_method: task_arithmetic
slices:
- sources:
  - layer_range: [0, 32]
    model: teknium/OpenHermes-2.5-Mistral-7B
  - layer_range: [0, 32]
    model: nlpguy/Hermes-low-tune-2
    parameters:
      weight: 0.2
  - layer_range: [0, 32]
    model: beowolx/MistralHermes-CodePro-7B-v1
    parameters:
      weight: 0.2
  - layer_range: [0, 32]
    model: flemmingmiguel/Mistrality-7B
    parameters:
      weight: 0.2
  - layer_range: [0, 32]
    model: charlesdedampierre/TopicNeuralHermes-2.5-Mistral-7B
    parameters:
      weight: 0.2
  - layer_range: [0, 32]
    model: openaccess-ai-collective/openhermes-2_5-dpo-no-robots
    parameters:
      weight: 0.2