---
# mergekit configuration: DARE-TIES merge of six Mistral-7B-family models.
# Each non-base model contributes weight 0.2; `density` controls the fraction
# of delta parameters retained per model under DARE sparsification.
models:
  - model: liminerity/M7-7b
    # no parameters necessary for base model
  - model: yam-peleg/Experiment26-7B
    parameters:
      weight: 0.2
      density: 0.66
  - model: Gille/StrangeMerges_32-7B-slerp
    parameters:
      weight: 0.2
      density: 0.55
  - model: MSL7/INEX12-7b
    parameters:
      weight: 0.2
      density: 0.33
  - model: automerger/YamShadow-7B
    parameters:
      weight: 0.2
      density: 0.66
  - model: Kukedlc/NeuralSirKrishna-7b
    parameters:
      weight: 0.2
      density: 0.66
merge_method: dare_ties
# Base model must match the first entry above (deltas are taken against it).
base_model: liminerity/M7-7b
parameters:
  int8_mask: true
  normalize: true
dtype: bfloat16