---
# mergekit configuration: DARE-TIES merge of two Llama-3-8B fine-tunes
# into "Athena-8B", using Hermes-2-Pro as the base model.
name: Athena-8B
# Build the tokenizer as the union of both source tokenizers.
tokenizer_source: union
# SLERP-interpolate the embedding layers (needed when tokenizers differ).
embed_slerp: true

models:
  - model: NousResearch/Hermes-2-Pro-Llama-3-8B
    parameters:
      density: 0.5
      weight: 0.6
  # No parameters necessary for base model
  - model: cognitivecomputations/dolphin-2.9-llama3-8b
    parameters:
      density: 0.5
      weight: 0.4

merge_method: dare_ties
base_model: NousResearch/Hermes-2-Pro-Llama-3-8B
parameters:
  # Mask delta weights to int8 range during merge (memory/precision tradeoff).
  int8_mask: true
dtype: bfloat16