# EstopianMaid-13B — mergekit_config.yml
# Uploaded by KatyTheCutie via huggingface_hub (commit cd06a1c, verified; 663 bytes)
---
# mergekit task-arithmetic merge of five Llama-2-13B fine-tunes.
# NOTE(review): in mergekit's task_arithmetic method the base model is the
# reference that each fine-tune's delta is computed against; the deltas are
# scaled by `weight` and summed back onto the base — confirm against the
# mergekit docs for the pinned version.
base_model: TheBloke/Llama-2-13B-fp16
dtype: float16
merge_method: task_arithmetic
slices:
  - sources:
      # Base model entry carries no weight (it is the reference, not a delta).
      - layer_range: [0, 40]
        model: TheBloke/Llama-2-13B-fp16
      - layer_range: [0, 40]
        model: BlueNipples/TimeCrystal-l2-13B
        parameters:
          weight: 0.75
      - layer_range: [0, 40]
        model: cgato/Thespis-13b-DPO-v0.7
        parameters:
          weight: 0.23
      - layer_range: [0, 40]
        model: KoboldAI/LLaMA2-13B-Estopia
        parameters:
          weight: 0.15
      - layer_range: [0, 40]
        model: NeverSleep/Noromaid-13B-0.4-DPO
        parameters:
          weight: 0.2
      - layer_range: [0, 40]
        model: Doctor-Shotgun/cat-v1.0-13b
        parameters:
          weight: 0.03