gemma-2-9b-merge-della-2 / mergekit_config.yml
models:
  - model: google/gemma-2-9b
  - model: google/gemma-2-9b-it
    parameters:
      density: 1.0
      weight: 0.6
  - model: wzhouad/gemma-2-9b-it-WPO-HB
    parameters:
      density: 0.55
      weight: 0.6
  - model: princeton-nlp/gemma-2-9b-it-SimPO
    parameters:
      density: 0.35
      weight: 0.6
  - model: UCLA-AGI/Gemma-2-9B-It-SPPO-Iter3
    parameters:
      density: 0.25
      weight: 0.4
merge_method: della
base_model: google/gemma-2-9b
parameters:
  normalize: true
  int8_mask: true
  lambda: 1.0
  epsilon: 0.1
dtype: float16
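
This configuration merges google/gemma-2-9b-it, wzhouad/gemma-2-9b-it-WPO-HB, princeton-nlp/gemma-2-9b-it-SimPO, and UCLA-AGI/Gemma-2-9B-It-SPPO-Iter3 onto the google/gemma-2-9b base using mergekit's DELLA method. The sketch below shows one way the merge could be run from Python; it assumes mergekit is installed, the API names (MergeConfiguration, MergeOptions, run_merge) follow mergekit's documented example and may differ between versions, and the output path is an arbitrary placeholder.

# Minimal sketch: run this DELLA merge with mergekit's Python API.
# Assumes `pip install mergekit`; exact class/option names may vary by version.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load and validate this YAML configuration.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the merged model to a local directory.
run_merge(
    merge_config,
    out_path="./merged-model",  # placeholder output directory
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use GPU if available
        copy_tokenizer=True,             # copy the tokenizer alongside the weights
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)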