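# mergekit configuration: a DARE-TIES merge of seven Llama-3-8B fine-tunes onto
# NousResearch/Meta-Llama-3-8B across all 32 layers.
# - density: fraction of each donor's delta parameters kept after random dropping
#   (DARE); the surviving deltas are rescaled to compensate.
# - weight: that donor's contribution to the merged deltas; the weights below sum to 1.0.
# - int8_mask: 1.0 stores intermediate parameter masks in int8 to reduce memory use.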
base_model: NousResearch/Meta-Llama-3-8B
dtype: bfloat16
merge_method: dare_ties
parameters:
  int8_mask: 1.0
slices:
  - sources:
      - layer_range: [0, 32]
        model: NousResearch/Meta-Llama-3-8B
      - layer_range: [0, 32]
        model: nbeerbower/llama-3-spicy-abliterated-stella-8B
        parameters:
          density: 0.6
          weight: 0.22
      - layer_range: [0, 32]
        model: flammenai/Mahou-1.2-llama3-8B
        parameters:
          density: 0.6
          weight: 0.22
      - layer_range: [0, 32]
        model: hf-100/Llama-3-Spellbound-Instruct-8B-0.3
        parameters:
          density: 0.58
          weight: 0.14
      - layer_range: [0, 32]
        model: zeroblu3/NeuralPoppy-EVO-L3-8B
        parameters:
          density: 0.58
          weight: 0.14
      - layer_range: [0, 32]
        model: Nitral-AI/Hathor_Stable-v0.2-L3-8B
        parameters:
          density: 0.56
          weight: 0.1
      - layer_range: [0, 32]
        model: Hastagaras/Jamet-8B-L3-MK.V-Blackroot
        parameters:
          density: 0.56
          weight: 0.1
      - layer_range: [0, 32]
        model: emnakamura/llama-3-MagicDolphin-8B
        parameters:
          density: 0.55
          weight: 0.08
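# Usage sketch (not part of the original file): assumes this config is saved as
# mergekit_config.yml and mergekit is installed; the output directory name is a
# placeholder.
#   mergekit-yaml mergekit_config.yml ./merged-model --cuda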