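# mergekit merge configuration. Judging by the evol_merge_storage paths, this
# appears to be a candidate produced by an evolutionary merge search (an
# assumption, not stated in the file itself). With the task_arithmetic method,
# each listed model contributes its task vector (model minus base) scaled by
# its weight, and the scaled vectors are added onto the base model's weights.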
base_model: /home/evol_merge_storage/input_models/Mistral-7B-v0.1_8133861
dtype: bfloat16
merge_method: task_arithmetic
parameters:
  int8_mask: 1.0
  normalize: 0.0
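# Layers 0-32 are merged in four 8-layer slices (0-8, 8-16, 16-24, 24-32).
# Each slice blends the same three fine-tuned models with its own weights;
# the Mistral-7B-v0.1 base is listed last in every slice without a weight,
# presumably so the slice carries the base layers the task vectors are
# computed against.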
slices:
- sources:
  - layer_range: [0, 8]
    model: /home/evol_merge_storage/input_models/Hermes-2-Pro-Mistral-7B_2793206805
    parameters:
      weight: 0.45632162073777516
  - layer_range: [0, 8]
    model: /home/evol_merge_storage/input_models/Dans-AdventurousWinds-Mk2-7b_1152917843
    parameters:
      weight: 0.6468013824217197
  - layer_range: [0, 8]
    model: /home/evol_merge_storage/input_models/zephyr-7b-beta_2449712360
    parameters:
      weight: 0.04013732886365818
  - layer_range: [0, 8]
    model: /home/evol_merge_storage/input_models/Mistral-7B-v0.1_8133861
- sources:
  - layer_range: [8, 16]
    model: /home/evol_merge_storage/input_models/Hermes-2-Pro-Mistral-7B_2793206805
    parameters:
      weight: 0.39991499422381294
  - layer_range: [8, 16]
    model: /home/evol_merge_storage/input_models/Dans-AdventurousWinds-Mk2-7b_1152917843
    parameters:
      weight: 0.3033908462486953
  - layer_range: [8, 16]
    model: /home/evol_merge_storage/input_models/zephyr-7b-beta_2449712360
    parameters:
      weight: 0.3250623744211742
  - layer_range: [8, 16]
    model: /home/evol_merge_storage/input_models/Mistral-7B-v0.1_8133861
- sources:
  - layer_range: [16, 24]
    model: /home/evol_merge_storage/input_models/Hermes-2-Pro-Mistral-7B_2793206805
    parameters:
      weight: 0.6202885707451035
  - layer_range: [16, 24]
    model: /home/evol_merge_storage/input_models/Dans-AdventurousWinds-Mk2-7b_1152917843
    parameters:
      weight: 0.235874597687366
  - layer_range: [16, 24]
    model: /home/evol_merge_storage/input_models/zephyr-7b-beta_2449712360
    parameters:
      weight: 0.27733993810853536
  - layer_range: [16, 24]
    model: /home/evol_merge_storage/input_models/Mistral-7B-v0.1_8133861
- sources:
  - layer_range: [24, 32]
    model: /home/evol_merge_storage/input_models/Hermes-2-Pro-Mistral-7B_2793206805
    parameters:
      weight: 0.5484088078896818
  - layer_range: [24, 32]
    model: /home/evol_merge_storage/input_models/Dans-AdventurousWinds-Mk2-7b_1152917843
    parameters:
      weight: 0.876059424739865
  - layer_range: [24, 32]
    model: /home/evol_merge_storage/input_models/zephyr-7b-beta_2449712360
    parameters:
      weight: 0.26196309089548436
  - layer_range: [24, 32]
    model: /home/evol_merge_storage/input_models/Mistral-7B-v0.1_8133861
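# To rerun the merge (assuming mergekit is installed and the input model paths
# above exist locally), a command along the lines of
#   mergekit-yaml <this config file> ./merged-model --cuda
# should reproduce the merged checkpoint in bfloat16.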