Llama-3-16B / mergekit_config.yml
dtype: float16
merge_method: passthrough
slices:
- sources:
  - layer_range: [0, 16]
    model: /home/maziyar/apps/LLMs/fine-tuning/merges/multi_merges/merge/models/MaziyarPanahi/Llama-3-11B
- sources:
  - layer_range: [8, 24]
    model: /home/maziyar/apps/LLMs/fine-tuning/merges/multi_merges/merge/models/MaziyarPanahi/Llama-3-11B
- sources:
  - layer_range: [16, 32]
    model: /home/maziyar/apps/LLMs/fine-tuning/merges/multi_merges/merge/models/MaziyarPanahi/Llama-3-11B
- sources:
  - layer_range: [24, 48]
    model: /home/maziyar/apps/LLMs/fine-tuning/merges/multi_merges/merge/models/MaziyarPanahi/Llama-3-11B
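
This is a passthrough ("frankenmerge") configuration: four overlapping slices of layers from the same Llama-3-11B checkpoint are stacked back to back to form the deeper Llama-3-16B model, with no weight averaging involved. As a reference, below is a minimal sketch of applying such a config with mergekit's Python API; the output directory and option values are illustrative assumptions and are not part of the original file.

# Minimal sketch: apply a mergekit passthrough config via the Python API.
# The output path and MergeOptions values are assumptions, not from the original repo.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"   # the config shown above
OUTPUT_PATH = "./Llama-3-16B"        # hypothetical output directory

# Parse the YAML into mergekit's typed configuration object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the merge; passthrough copies the requested layer ranges and stacks them in order.
run_merge(
    merge_config,
    OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy tokenizer files from the source model
        lazy_unpickle=True,              # reduce peak memory while loading shards
    ),
)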