```yaml
base_model: unsloth/Meta-Llama-3.1-8B-Instruct
dtype: bfloat16
merge_method: dare_ties
slices:
- sources:
  - layer_range: [0, 32]
    model: akjindal53244/Llama-3.1-Storm-8B
    parameters:
      density: 0.8
      weight: 0.25
  - layer_range: [0, 32]
    model: arcee-ai/Llama-3.1-SuperNova-Lite
    parameters:
      density: 0.8
      weight: 0.33
  - layer_range: [0, 32]
    model: SicariusSicariiStuff/LLAMA-3_8B_Unaligned_BETA
    parameters:
      density: 0.8
      weight: 0.42
  - layer_range: [0, 32]
    model: unsloth/Meta-Llama-3.1-8B-Instruct
tokenizer_source: base
```
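Below is a minimal sketch of how a config like this can be applied programmatically, assuming the mergekit Python API (`MergeConfiguration`, `run_merge`, `MergeOptions`) as documented in the mergekit repository; the file names `merge-config.yaml` and `./merged-model` are placeholders, not paths from this card.

```python
# Sketch: apply the merge config above with mergekit's Python API.
# Assumes mergekit, torch, and pyyaml are installed; paths are placeholders.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "merge-config.yaml"  # the YAML above, saved to disk
OUTPUT_PATH = "./merged-model"    # where the merged weights are written

with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is present
        copy_tokenizer=True,             # write a tokenizer into the output directory
    ),
)
```

The same merge can typically be run from the command line with `mergekit-yaml merge-config.yaml ./merged-model --cuda`.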