# mergekit merge configuration — TIES merge of Llama-2-13B-based models.
# (Source was pasted from a file viewer; size/commit/line-number chrome removed.)
---
# TIES merge of three Llama-2-13B fine-tunes onto the base model.
# Schema: mergekit merge config (merge_method: ties).
# NOTE(review): nesting reconstructed from the flattened paste — the
# model-reference shape (model: → model: → path:) follows mergekit's
# ModelReference serialization; verify against the original file if available.
base_model:
  model:
    path: TheBloke/Llama-2-13B-fp16
dtype: bfloat16
merge_method: ties
parameters:
  # int8_mask / normalize are TIES options; 1.0 enables them.
  int8_mask: 1.0
  normalize: 1.0
slices:
  - sources:
      # All sources cover the full 40-layer range of Llama-2-13B.
      - layer_range: [0, 40]
        model:
          model:
            path: Masterjp123/Snowyrp-V2B-P1
        parameters:
          # Gradient: density interpolated across layers from 1.0 down to 0.1.
          density: [1.0, 0.7, 0.1]
          weight: 1.0
      - layer_range: [0, 40]
        model:
          model:
            path: Masterjp123/SnowyRP-FinalV1-L2-13B
        parameters:
          density: 0.5
          # Gradient: weight ramps from 0.0 (early layers) to 1.0 (late layers).
          weight: [0.0, 0.3, 0.7, 1.0]
      - layer_range: [0, 40]
        model:
          model:
            path: sauce1337/BerrySauce-L2-13b
        parameters:
          density: 0.33
          weight:
            # Only MLP tensors contribute (0.5); everything else is zeroed.
            - filter: mlp
              value: 0.5
            - value: 0.0
      # Base model included as a source with no parameters (TIES reference).
      - layer_range: [0, 40]
        model:
          model:
            path: TheBloke/Llama-2-13B-fp16