---
# mergekit DARE-TIES merge: blends llama-3-wissenschaft-8B-v2 into the
# cstr/llama3-8b-spaetzle-v13 base model.
models:
  # Base model — no per-model parameters necessary.
  - model: cstr/llama3-8b-spaetzle-v13
  - model: nbeerbower/llama-3-wissenschaft-8B-v2
    parameters:
      density: 0.65  # fraction of delta weights retained (DARE drop rate = 0.35)
      weight: 0.4    # merge weight for this model's task vector
merge_method: dare_ties
base_model: cstr/llama3-8b-spaetzle-v13
parameters:
  int8_mask: true  # compute TIES sign mask in int8 to reduce memory use
dtype: bfloat16
random_seed: 0  # fixes DARE's stochastic dropping for reproducible merges
tokenizer_source: base