# File size: 514 Bytes
# blob: a1a089f
---
# mergekit configuration: DARE-TIES merge of four Mistral-7B fine-tunes
# onto the Mistral-7B-Instruct-v0.2 base model.
base_model: mistralai/Mistral-7B-Instruct-v0.2
dtype: bfloat16
merge_method: dare_ties

models:
  # The base model is listed without parameters — it acts as the reference
  # against which the other models' task vectors (deltas) are computed.
  - model: mistralai/Mistral-7B-Instruct-v0.2

  # density: fraction of each model's delta weights retained before rescaling
  # (DARE drop rate = 1 - density); weight: relative contribution of the model.
  # NOTE(review): weights sum to 1.6 — mergekit normalizes by default, but
  # confirm `normalize` is not disabled if exact ratios matter.
  - model: openchat/openchat-3.5-0106
    parameters:
      density: 0.8
      weight: 0.4

  - model: OpenPipe/mistral-ft-optimized-1227
    parameters:
      density: 0.8
      weight: 0.4

  - model: berkeley-nest/Starling-LM-7B-alpha
    parameters:
      density: 0.8
      weight: 0.5

  - model: jan-hq/supermario-v2
    parameters:
      density: 0.8
      weight: 0.3

# Global merge parameters.
parameters:
  # Use int8 masking when building the sign-consensus mask (memory saving).
  int8_mask: true