---
# mergekit merge configuration: combines two Llama-3-8B fine-tunes into a
# single model named "Proteus-8B" using the dare_ties merge method, with
# Hermes-2-Theta-Llama-3-8B as the base model.
# NOTE(review): reconstructed from a whitespace-collapsed single line;
# indentation restored to standard mergekit block layout — verify against
# the original author's intent.

tokenizer_source: union  # presumably union of both models' tokenizers — see mergekit docs
embed_slerp: true        # NOTE(review): looks like SLERP interpolation of embeddings — confirm
name: Proteus-8B

models:
  - model: cognitivecomputations/dolphin-2.9-llama3-8b
    parameters:
      density: 0.5  # presumably DARE delta-retention fraction — see mergekit docs
      weight: 0.4   # relative merge contribution (0.4 + 0.6 = 1.0 across models)
  # No parameters necessary for base model
  # NOTE(review): comment above appears stale — the base model entry below
  # DOES carry parameters here; confirm whether they are intended.
  - model: NousResearch/Hermes-2-Theta-Llama-3-8B
    parameters:
      density: 0.5
      weight: 0.6

merge_method: dare_ties
base_model: NousResearch/Hermes-2-Theta-Llama-3-8B

parameters:
  int8_mask: true  # presumably int8 sign-mask computation — TODO confirm against mergekit docs

dtype: bfloat16  # output tensor dtype for the merged weights