models:
  - model: meta-llama/Meta-Llama-3-8B
    # No parameters necessary for base model
  - model: Weyaxi/Einstein-v6.1-Llama3-8B
    parameters:
      density: 0.5
      weight: 0.5
  - model: meta-llama/Meta-Llama-3-8B-Instruct
    parameters:
      density: 0.5
      weight: 0.5
merge_method: ties
base_model: meta-llama/Meta-Llama-3-8B
parameters:
  normalize: false
  int8_mask: true
dtype: bfloat16
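
As a minimal sketch, the merge can be executed with mergekit's Python API, assuming the configuration above is saved as `config.yaml` and mergekit is installed (`pip install mergekit`). The output directory name is illustrative, and the exact `MergeOptions` fields may differ between mergekit versions:

```python
# Sketch: run the TIES merge described by config.yaml with mergekit.
# Assumes mergekit, PyTorch, and PyYAML are installed; output path is hypothetical.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML config into mergekit's configuration object
with open("config.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./Meta-Llama-3-8B-ties-merge",  # hypothetical output directory
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

The same merge can also be launched from the command line with mergekit's `mergekit-yaml` entry point, passing the config file and an output path.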