Gistral-16B / mergekit_config.yml
slices:
  - sources:
      - model: Gaivoronsky/Mistral-7B-Saiga
        layer_range: [0, 32]
  - sources:
      - model: HuggingFaceH4/mistral-7b-grok
        layer_range: [24, 32]
  - sources:
      - model: HuggingFaceH4/mistral-7b-anthropic
        layer_range: [24, 32]
  - sources:
      - model: NousResearch/Yarn-Mistral-7b-128k
        layer_range: [26, 32]
  - sources:
      - model: snorkelai/Snorkel-Mistral-PairRM-DPO
        layer_range: [26, 32]
  - sources:
      - model: OpenBuddy/openbuddy-mistral2-7b-v20.3-32k
        layer_range: [26, 32]
  - sources:
      - model: meta-math/MetaMath-Mistral-7B
        layer_range: [28, 32]
  - sources:
      - model: ajibawa-2023/Code-Mistral-7B
        layer_range: [28, 32]
  - sources:
      - model: SherlockAssistant/Mistral-7B-Instruct-Ukrainian
        layer_range: [30, 32]
merge_method: passthrough
dtype: bfloat16
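
For reference, this is a passthrough ("frankenmerge") configuration: mergekit concatenates the listed layer slices in order rather than averaging weights, so the slices above stack to 76 decoder layers (32 + 8 + 8 + 6 + 6 + 6 + 4 + 4 + 2), consistent with the ~16B scale in the model name. Below is a minimal sketch of running this config with mergekit's Python API; the output path and option values are illustrative assumptions, not part of this file.

# Sketch: run this mergekit config from Python. Assumes mergekit and torch
# are installed; the paths below are placeholders, not from the original file.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"  # this config file
OUTPUT_PATH = "./Gistral-16B"       # where to write the merged model

# Parse and validate the YAML into a MergeConfiguration object
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the passthrough merge, stacking the layer slices in order
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when available
        copy_tokenizer=True,             # carry a tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)

The bundled CLI is equivalent: mergekit-yaml mergekit_config.yml ./Gistral-16B --cuda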