```yaml
models:
  - model: eren23/ogno-monarch-jaskier-merge-7b-OH-PREF-DPO-v2
    parameters:
      weight: 0.35
  - model: yam-peleg/Experiment26-7B
    parameters:
      weight: 0.65
base_model: yam-peleg/Experiment26-7B
merge_method: task_arithmetic
dtype: bfloat16
```
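This is a mergekit configuration. The `task_arithmetic` merge method adds weighted task vectors (each model's parameter delta from `base_model`) onto the base weights. Below is a minimal per-tensor sketch of that arithmetic, assuming the standard task-vector formulation; the toy tensors are placeholders for the real 7B checkpoints, which are elided here.

```python
import torch

def task_arithmetic(base: torch.Tensor,
                    models: list[torch.Tensor],
                    weights: list[float]) -> torch.Tensor:
    """Merge one parameter tensor: base plus weighted task vectors."""
    merged = base.clone()
    for tensor, weight in zip(models, weights):
        # task vector = fine-tuned weights minus base weights
        merged += weight * (tensor - base)
    return merged

# Toy tensors standing in for a single layer's weights.
base = torch.randn(4, 4)
model_a = torch.randn(4, 4)  # eren23/ogno-monarch-jaskier-merge-7b-OH-PREF-DPO-v2
model_b = base               # yam-peleg/Experiment26-7B is also the base model

merged = task_arithmetic(base, [model_a, model_b], [0.35, 0.65])
merged = merged.to(torch.bfloat16)  # matches the config's dtype: bfloat16
```

Note that `yam-peleg/Experiment26-7B` appears both as the base and as a weighted model, so under this formulation its task vector is zero and only the 0.35-weighted delta from the other model actually shifts the result. The merge itself is typically executed with mergekit's `mergekit-yaml` CLI, given this config file and an output directory.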