models:
  - model: Phind/Phind-CodeLlama-34B-v2
    parameters:
      density: 0.5
      weight: 0.6
  - model: codefuse-ai/CodeFuse-CodeLlama-34B
    parameters:
      density: 0.5
      weight: 0.4
merge_method: dare_ties
base_model: Phind/Phind-CodeLlama-34B-v2
parameters:
  int8_mask: true
dtype: bfloat16
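This configuration describes a DARE-TIES merge: the deltas of Phind-CodeLlama-34B-v2 (weight 0.6) and CodeFuse-CodeLlama-34B (weight 0.4) are pruned to 50% density and combined on top of the Phind base, with the result stored in bfloat16. Below is a minimal sketch of applying the config with mergekit's Python API; the import paths and `MergeOptions` fields follow mergekit's documented usage, but treat the exact signatures as an assumption for your installed version, and note that the config filename and output directory are placeholders.

```python
# Minimal sketch: load the YAML above and run the DARE-TIES merge with mergekit.
# Import paths and option names follow mergekit's documented Python usage;
# verify exact signatures against the mergekit docs for your installed version.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# "merge-config.yml" is a placeholder for wherever the config above is saved.
with open("merge-config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./merged-codellama-34b",  # placeholder output directory
    options=MergeOptions(
        cuda=True,            # run the merge on GPU if one is available
        copy_tokenizer=True,  # copy the base model's tokenizer into the output
        lazy_unpickle=True,   # lower peak memory when loading checkpoint shards
    ),
)
```

The same merge can also be run from the command line with mergekit's `mergekit-yaml` entry point, passing the config path and an output directory.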