---
# mergekit merge configuration — combines two 7B models with the DARE-TIES method.
# NOTE(review): key names and values follow the mergekit config schema — verify
# against the installed mergekit version.
models:
  - model: mlabonne/AlphaMonarch-7B
    # No parameters necessary for base model
  - model: RJuro/munin-neuralbeagle-7b
    parameters:
      # Fraction of delta weights retained (DARE drop-and-rescale), presumably —
      # TODO confirm against mergekit docs.
      density: 0.53
      # Relative contribution of this model in the merge.
      weight: 0.6
merge_method: dare_ties
base_model: mlabonne/AlphaMonarch-7B
parameters:
  # int8_mask: use int8 for the TIES sign mask (memory saving during merge),
  # presumably — verify with mergekit documentation.
  int8_mask: true
# Output tensor dtype for the merged model.
dtype: bfloat16