WestOrcaDPO-7B-GTA / mergekit_config.yml
models:
  - model: decruz07/kellemar-DPO-Orca-Distilled-7B-SLERP
    parameters:
      density: 0.5
      weight: 0.4
  - model: senseable/WestLake-7B-v2
    parameters:
      density: 0.5
      weight: 0.6
merge_method: task_arithmetic
base_model: mistralai/Mistral-7B-v0.1
parameters:
  int8_mask: True
  normalize: True
dtype: float16
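
For reference, below is a minimal sketch of applying this configuration with mergekit's Python API, roughly following the usage shown in the mergekit README; the local config path and output directory are assumptions, and the same merge can be run from the command line with mergekit-yaml mergekit_config.yml ./output-model --cuda.

import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "./mergekit_config.yml"   # assumed local copy of the config above
OUTPUT_PATH = "./WestOrcaDPO-7B-GTA"   # assumed directory for the merged model

# Parse the YAML into mergekit's configuration object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the merge; a GPU is used if one is available.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),
        copy_tokenizer=True,
    ),
)

With weights 0.4 and 0.6 the result leans toward WestLake-7B-v2: task_arithmetic adds each model's weighted delta (task vector) from the shared base, mistralai/Mistral-7B-v0.1.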