# miquliz-120b / mergekit_config.yml
# wolfram's picture
# Upload folder using huggingface_hub
# 94cef95 verified
# raw history blame contribute delete
# 713 Bytes
# NOTE(review): the lines above are HuggingFace page chrome captured by the
# scrape, kept as comments so the document remains valid YAML.
# mergekit configuration for miquliz-120b: a passthrough "frankenmerge"
# that stacks alternating, overlapping layer slices from two 70B models
# (152334H/miqu-1-70b-sf and lizpreciatior/lzlv_70b_fp16_hf) into one
# deeper model. passthrough copies the selected layers' weights verbatim —
# no weight averaging is performed.
#
# NOTE(review): the scraped source had all indentation flattened to column 0,
# which made `model:` lines parse as duplicate root-level keys; the canonical
# mergekit nesting (sources → layer_range/model) is restored here.
dtype: float16
merge_method: passthrough
slices:
# Slices are concatenated in listed order. layer_range appears to be
# [start, end) per mergekit convention (end-exclusive) — TODO confirm
# against mergekit docs. Consecutive slices alternate between the two
# source models with partially overlapping layer windows.
- sources:
  - layer_range: [0, 16]
    model: 152334H/miqu-1-70b-sf
- sources:
  - layer_range: [8, 24]
    model: lizpreciatior/lzlv_70b_fp16_hf
- sources:
  - layer_range: [17, 32]
    model: 152334H/miqu-1-70b-sf
- sources:
  - layer_range: [25, 40]
    model: lizpreciatior/lzlv_70b_fp16_hf
- sources:
  - layer_range: [33, 48]
    model: 152334H/miqu-1-70b-sf
- sources:
  - layer_range: [41, 56]
    model: lizpreciatior/lzlv_70b_fp16_hf
- sources:
  - layer_range: [49, 64]
    model: 152334H/miqu-1-70b-sf
- sources:
  - layer_range: [57, 72]
    model: lizpreciatior/lzlv_70b_fp16_hf
- sources:
  - layer_range: [65, 80]
    model: 152334H/miqu-1-70b-sf