models:
  - model: Locutusque/Llama-3-Orca-1.0-8B
    # no parameters necessary for base model
  - model: Locutusque/llama-3-neural-chat-v1-8b
    parameters:
      density: 0.60
      weight: 0.15
  - model: DiscoResearch/Llama3_DiscoLM_German_8b_v0.1_experimental
    parameters:
      density: 0.65
      weight: 0.7
merge_method: dare_ties
base_model: Locutusque/Llama-3-Orca-1.0-8B
parameters:
  int8_mask: true
dtype: bfloat16
random_seed: 0
tokenizer_source: base
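
Below is a minimal sketch of how a configuration like this could be applied with mergekit's Python API (`MergeConfiguration`, `MergeOptions`, `run_merge`), assuming mergekit is installed; the file name `config.yml` and output path are placeholders, and the `mergekit-yaml` command-line tool provides the same functionality.

```python
# Sketch: run the DARE-TIES merge described by the YAML above.
# Assumes the config has been saved to "config.yml" (hypothetical path).
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "config.yml"      # path to the merge config shown above (assumption)
OUTPUT_PATH = "./merged-model"  # directory where the merged weights are written

# Parse the YAML into mergekit's configuration model.
with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge; the tokenizer is taken from the base model
# as specified by `tokenizer_source: base`.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is present
        copy_tokenizer=True,
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

The merged model written to the output directory can then be loaded like any other Hugging Face checkpoint, for example with `transformers.AutoModelForCausalLM.from_pretrained("./merged-model")`.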