---
# mergekit configuration: DARE-TIES merge of three Llama-3-8B variants.
# NOTE(review): reconstructed from a whitespace-collapsed one-line file;
# structure inferred from mergekit's config schema — confirm against the
# original before merging.

models:
  # Base long-context model; also serves as base_model below.
  - model: princeton-nlp/Llama-3-8B-ProLong-512k-Base
    parameters:
      weight: 0.4     # relative contribution (weights sum to 1.0)
      density: 0.7    # fraction of delta parameters retained (DARE drop rate = 1 - density)
  - model: Arkana08/LexiMaid-L3-8B
    parameters:
      weight: 0.3
      density: 0.75
  - model: DavidAU/L3.1-Dark-Planet-SpinFire-Uncensored-8B
    parameters:
      weight: 0.3
      density: 0.65

# DARE-TIES: drop-and-rescale sparsification with TIES sign-consensus merging.
merge_method: dare_ties

# Deltas are computed against this model; it is also the first entry above.
base_model: princeton-nlp/Llama-3-8B-ProLong-512k-Base

parameters:
  int8_mask: true     # use int8 masks during merging to reduce memory use

dtype: bfloat16       # output tensor dtype