# HuggingFace page metadata captured with the file (not part of the config):
# Ksgk-fy's picture
# Upload folder using huggingface_hub
# 6ce2ea2 verified
# raw
# history blame contribute delete
# 561 Bytes
# mergekit SLERP merge config: interpolates liminerity/M7-7b with
# AurelPx/Percival_01-7b-slerp across all 32 transformer layers.
# NOTE(review): indentation restored — the scraped copy had every line at
# column 0, which breaks the slices/sources and parameters/t nesting.
slices:
  - sources:
      # Both source models contribute their full layer stack.
      - model: liminerity/M7-7b
        layer_range: [0, 32]
      - model: AurelPx/Percival_01-7b-slerp
        layer_range: [0, 32]
merge_method: slerp
# base_model supplies the reference weights/tokenizer for the merge.
base_model: liminerity/M7-7b
parameters:
  # t: interpolation factor(s); a list value is spread across the layer range.
  t:
    - filter: self_attn
      value: [0.6842220974952672, 0.46727995958304114, 0.7989557128685841, 0.17557699884860734, 0.3590549769955834]
    - filter: mlp
      value: [0.3157779025047328, 0.5327200404169589, 0.8244230011513927, 0.8244230011513927, 0.6409450230044166]
    # Fallback t for all tensors not matched by a filter above.
    - value: 0.9080883966262864
dtype: bfloat16
random_seed: 0