---
# mergekit configuration: DARE-TIES merge of three biomedical Llama-3-8B
# fine-tunes onto the Meta-Llama-3-8B-Instruct base.
# density = fraction of delta weights retained per model (DARE sparsification);
# weight  = relative contribution of each model to the merged result.
models:
  - model: meta-llama/Meta-Llama-3-8B-Instruct
    # No parameters necessary for base model
  - model: aaditya/Llama3-OpenBioLLM-8B
    parameters:
      density: 0.53
      weight: 0.5
  - model: johnsnowlabs/JSL-MedLlama-3-8B-v1.0
    parameters:
      density: 0.53
      weight: 0.3
  - model: winninghealth/WiNGPT2-Llama-3-8B-Base
    parameters:
      density: 0.53
      weight: 0.2
merge_method: dare_ties
base_model: meta-llama/Meta-Llama-3-8B-Instruct
parameters:
  # int8_mask trades a small amount of precision for lower memory use
  # when building the task-vector sign mask.
  int8_mask: true
dtype: bfloat16