```yaml
models:
  - model: mistralai/Mistral-7B-v0.1
    # No parameters necessary for base model
  - model: Kukedlc/NeuralMaxime-7B-slerp
    # Emphasize the beginning of Vicuna format models
    parameters:
      weight: 0.36
      density: 0.65
  - model: eren23/ogno-monarch-jaskier-merge-7b
    parameters:
      weight: 0.34
      density: 0.6 # Vicuna format
  - model: eren23/dpo-binarized-NeutrixOmnibe-7B
    parameters:
      weight: 0.3
      density: 0.6
merge_method: dare_ties
base_model: mistralai/Mistral-7B-v0.1
parameters:
  int8_mask: true
dtype: bfloat16
random_seed: 0
```
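
To turn this config into an actual merged model, it can be fed to mergekit. The snippet below is a minimal sketch using mergekit's Python API, assuming the config is saved as `config.yaml` and the output is written to `./merge`; exact option names may vary slightly between mergekit versions.

```python
# Minimal sketch: run the DARE-TIES merge defined above with mergekit's Python API.
# Assumes the YAML config is saved as "config.yaml" and mergekit is installed
# (pip install mergekit); option names may differ between mergekit versions.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("config.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./merge",  # directory that will hold the merged weights and tokenizer
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use the GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

Roughly the same result can be obtained from the command line with `mergekit-yaml config.yaml ./merge --copy-tokenizer`.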