models:
  - model: codellama/CodeLlama-70b-Python-hf
    parameters:
      density: 0.5
      weight: 0.5
  - model: codellama/CodeLlama-70b-Instruct-hf
    parameters:
      density: 0.5
      weight: 1.0
merge_method: dare_ties
base_model: meta-llama/Llama-2-70b-hf
parameters:
  # You can uncomment and set these parameters as needed
  # normalize: false
  # int8_mask: true
dtype: float16
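A minimal sketch of applying this DARE-TIES config with mergekit's Python entry point. The file name `dare_ties.yaml` and the output directory are placeholders, and the import paths and options follow mergekit's documented usage, which may differ across versions:

```python
# Sketch: run the DARE-TIES merge defined in the config above.
# Assumes mergekit is installed; paths below are placeholders.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML config shown above into a mergekit configuration object.
with open("dare_ties.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge; the output directory will contain the merged float16 checkpoint.
run_merge(
    merge_config,
    out_path="./merged-codellama-70b",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

The same file can also be passed to the `mergekit-yaml` command-line tool (e.g. `mergekit-yaml dare_ties.yaml ./merged-codellama-70b --cuda`), which is equivalent for this kind of one-off merge.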