base_model: NousResearch/Meta-Llama-3-8B
dtype: bfloat16
merge_method: dare_ties
parameters:
  int8_mask: 1.0
slices:
- sources:
  - layer_range: [0, 32]
    model: NousResearch/Meta-Llama-3-8B
  - layer_range: [0, 32]
    model: nbeerbower/llama-3-spicy-abliterated-stella-8B
    parameters:
      density: 0.6
      weight: 0.22
  - layer_range: [0, 32]
    model: flammenai/Mahou-1.2-llama3-8B
    parameters:
      density: 0.6
      weight: 0.22
  - layer_range: [0, 32]
    model: hf-100/Llama-3-Spellbound-Instruct-8B-0.3
    parameters:
      density: 0.58
      weight: 0.14
  - layer_range: [0, 32]
    model: zeroblu3/NeuralPoppy-EVO-L3-8B
    parameters:
      density: 0.58
      weight: 0.14
  - layer_range: [0, 32]
    model: Nitral-AI/Hathor_Stable-v0.2-L3-8B
    parameters:
      density: 0.56
      weight: 0.1
  - layer_range: [0, 32]
    model: Hastagaras/Jamet-8B-L3-MK.V-Blackroot
    parameters:
      density: 0.56
      weight: 0.1
  - layer_range: [0, 32]
    model: emnakamura/llama-3-MagicDolphin-8B
    parameters:
      density: 0.55
      weight: 0.08
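
For reference, a minimal sketch of applying this config with mergekit's Python API. The file name `merge_config.yaml` and the output directory are placeholders, and the exact `MergeConfiguration` / `run_merge` / `MergeOptions` signatures are assumed from recent mergekit releases; adjust to your installed version (the `mergekit-yaml` CLI is an equivalent alternative).

```python
# Sketch: run the DARE-TIES merge described by the config above.
# Assumes mergekit is installed (pip install mergekit).
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load the YAML config shown above, saved locally as merge_config.yaml (placeholder name).
with open("merge_config.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./merged-model",           # placeholder output directory for the merged weights
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```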