3_PRYMMAL-ECE-7B-SLERP-V1 / mergekit_config.yml
slices:
  - sources:
      - model: Marsouuu/Qwen1_78-ECE-PRYMMAL-Martial
        layer_range: [0, 28]
      - model: LilRg/ECE-1B-merge-PRYMMAL
        layer_range: [0, 28]
merge_method: slerp
base_model: Marsouuu/Qwen1_78-ECE-PRYMMAL-Martial
parameters:
  t:
    # Interpolation factor: 0 keeps the base model's weights, 1 takes the
    # other model's; a list of values is spread as a gradient across the
    # layer range, per tensor-name filter.
    - filter: self_attn
      value: [0, 0.25, 0.5, 0.75, 1]
    - filter: mlp
      value: [1, 0.75, 0.5, 0.25, 0]
    # Default for all remaining tensors.
    - value: 0.5
dtype: bfloat16
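
# A minimal sketch of how a config like this is typically run, assuming
# mergekit is installed and this file is saved as mergekit_config.yml
# (the output directory name below is illustrative, not from the source):
#
#   pip install mergekit
#   mergekit-yaml mergekit_config.yml ./merged-model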