```yaml
base_model: teknium/OpenHermes-2.5-Mistral-7B
dtype: bfloat16
merge_method: task_arithmetic
slices:
  - sources:
      - layer_range: [0, 32]
        model: teknium/OpenHermes-2.5-Mistral-7B
      - layer_range: [0, 32]
        model: simonveitner/Math-OpenHermes-2.5-Mistral-7B
        parameters:
          weight: 0.25
      - layer_range: [0, 32]
        model: openaccess-ai-collective/dpopenhermes-alpha-v0
        parameters:
          weight: 0.25
      - layer_range: [0, 32]
        model: mlabonne/NeuralHermes-2.5-Mistral-7B
        parameters:
          weight: 0.25
      - layer_range: [0, 32]
        model: mlabonne/NeuralHermes-2.5-Mistral-7B-laser
        parameters:
          weight: 0.25
```