---
# mergekit configuration: DARE-TIES merge of four Mistral-7B-family chat models
# onto the LeoLM German chat base. Per-model `density` controls the fraction of
# delta weights kept; `weight` is the model's contribution in the merge.
models:
  - model: LeoLM/leo-mistral-hessianai-7b-chat
    # no parameters necessary for base model
  - model: FelixChao/WestSeverus-7B-DPO-v2
    parameters:
      density: 0.60
      weight: 0.30
  - model: mayflowergmbh/Wiedervereinigung-7b-dpo
    parameters:
      density: 0.65
      weight: 0.40
  - model: cognitivecomputations/openchat-3.5-0106-laser
    parameters:
      density: 0.60
      weight: 0.30
merge_method: dare_ties
base_model: LeoLM/leo-mistral-hessianai-7b-chat
parameters:
  # int8_mask reduces memory use during the merge without affecting the result
  int8_mask: true
dtype: bfloat16
# fixed seed so the stochastic DARE pruning step is reproducible
random_seed: 0
tokenizer_source: base