```yaml
models:
  - layer_range: [0, 32]  # Meta-Llama-3-8B has 32 transformer layers
    model: meta-llama/Meta-Llama-3-8B
    parameters:
      weight: 0.2
  - layer_range: [0, 32]
    model: meta-llama/Meta-Llama-3-8B-Instruct
    parameters:
      weight: 0.8
merge_method: task_arithmetic
base_model: meta-llama/Meta-Llama-3-8B
dtype: bfloat16
random_seed: 0
```
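Because the base model itself appears in the `models` list, its task vector (donor minus base) is zero, so this configuration effectively moves the base 80% of the way toward the instruct weights. The sketch below illustrates the task-arithmetic formula, merged = base + Σ wᵢ·(modelᵢ − base), on toy tensors under that assumption; the function name and the toy state dicts are illustrative and not mergekit internals.

```python
# Minimal sketch of a task_arithmetic merge over state dicts, assuming the
# convention merged = base + sum_i(weight_i * (donor_i - base)).
# `task_arithmetic_merge` and the toy tensors are hypothetical examples.
import torch


def task_arithmetic_merge(base_state, donor_states, weights):
    """Add weighted task vectors (donor - base) onto the base parameters."""
    merged = {}
    for name, base_param in base_state.items():
        delta = torch.zeros_like(base_param)
        for donor, weight in zip(donor_states, weights):
            delta += weight * (donor[name] - base_param)
        merged[name] = base_param + delta
    return merged


if __name__ == "__main__":
    # A single 2x2 "layer" stands in for real checkpoints.
    base = {"w": torch.zeros(2, 2)}
    donor_a = {"w": torch.zeros(2, 2)}  # base re-listed (weight 0.2): task vector is zero
    donor_b = {"w": torch.ones(2, 2)}   # stand-in for the instruct model (weight 0.8)
    out = task_arithmetic_merge(base, [donor_a, donor_b], [0.2, 0.8])
    print(out["w"])  # 0.2 * 0 + 0.8 * 1 = 0.8 in every entry
```

With mergekit installed, a configuration like the one above is normally saved to a YAML file and passed to the `mergekit-yaml` entry point together with an output directory.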