# mergekit merge configuration (TestBunny-7B)
---
# Task-arithmetic merge: deltas of each source model relative to base_model
# are scaled by `weight` and summed back onto the base.
base_model: D:/ModelMerge/models/Mistral-7B-v0.1
dtype: float16
merge_method: task_arithmetic
slices:
  - sources:
      # Primary donor — full weight.
      - layer_range: [0, 32]
        model: D:/ModelMerge/merges/TestBunny-7B/bunny-a
        parameters:
          weight: 1.0
      # Light flavor blend — small weight.
      - layer_range: [0, 32]
        model: D:/ModelMerge/models/LimaRP-Mistral-7B-v0.1
        parameters:
          weight: 0.08
      # Base model entry (no weight: serves as the reference for deltas).
      - layer_range: [0, 32]
        model: D:/ModelMerge/models/Mistral-7B-v0.1