L3-Hecate-8B-v1.2 / mergekit_config.yml
base_model: NousResearch/Meta-Llama-3-8B-Instruct
dtype: float32
merge_method: task_arithmetic
parameters:
  normalize: 0.0
slices:
- sources:
  - layer_range: [0, 32]
    model: output/hq_rp
    parameters:
      weight:
      - filter: mlp
        value: 1.15
      - filter: self_attn
        value: 1.025
      - value: 1.0
  - layer_range: [0, 32]
    model: NousResearch/Meta-Llama-3-8B-Instruct
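
For reference, a minimal per-tensor sketch of the task-arithmetic idea this config relies on: the fine-tuned model (output/hq_rp) minus the base model gives a task vector, which is scaled and added back onto the base. Per the weight filters above, mlp tensors use 1.15, self_attn tensors 1.025, and everything else 1.0; with normalize: 0.0 the result is not renormalized. The function and tensor names below are illustrative, not mergekit internals.

import torch

def task_arithmetic_merge(base_tensor: torch.Tensor,
                          tuned_tensor: torch.Tensor,
                          weight: float) -> torch.Tensor:
    # Task vector = fine-tuned weights minus base weights.
    task_vector = tuned_tensor - base_tensor
    # Merged tensor = base plus the scaled task vector.
    return base_tensor + weight * task_vector

# Example: an mlp tensor from output/hq_rp merged at weight 1.15.
# merged = task_arithmetic_merge(base_mlp, hq_rp_mlp, 1.15)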