base_model: /home/alpha/Storage/Models/Raw/chargoddard_Yi-34B-200K-Llama
dtype: bfloat16
merge_method: dare_ties
parameters:
  int8_mask: 1.0
slices:
- sources:
  - layer_range: [0, 60]
    model: /home/alpha/Storage/Models/Raw/chargoddard_Yi-34B-200K-Llama
  - layer_range: [0, 60]
    model: /home/alpha/Storage/Models/Raw/migtissera_Tess-34B-v1.4
    parameters:
      density: 0.61
      weight: [0.22, 0.113, 0.113, 0.113, 0.113, 0.113]
  - layer_range: [0, 60]
    model: /home/alpha/Models/Raw/Mihaiii_Pallas-0.5
    parameters:
      density: 0.61
      weight: [0.22, 0.113, 0.113, 0.113, 0.113, 0.113]
  - layer_range: [0, 60]
    model: /home/alpha/Storage/Models/Raw/bhenrym14_airoboros-3_1-yi-34b-200k
    parameters:
      density: 0.59
      weight: [0.02, 0.081, 0.081, 0.081, 0.081, 0.081]
  - layer_range: [0, 60]
    model: /home/alpha/Storage/Models/Raw/jondurbin_bagel-34b-v0.2
    parameters:
      density: 0.4
      weight: [0.02, 0.093, 0.093, 0.093, 0.093, 0.093]
  - layer_range: [0, 60]
    model: /home/alpha/Storage/Models/Raw/kyujinpy_PlatYi-34B-200k-Q-FastChat
    parameters:
      density: 0.59
      weight: [0.02, 0.081, 0.081, 0.081, 0.081, 0.081]
  - layer_range: [0, 60]
    model: /home/alpha/Models/Raw/adamo1139_Yi-34B-200K-AEZAKMI-v2
    parameters:
      density: 0.59
      weight: [0.02, 0.096, 0.096, 0.096, 0.096, 0.096]
  - layer_range: [0, 60]
    model: /home/alpha/Storage/Models/Raw/Nous-Capybara-34B
    parameters:
      density: 0.59
      weight: [0.21, 0.115, 0.115, 0.115, 0.115, 0.115]
  - layer_range: [0, 60]
    model: /home/alpha/FastModels/v8/4kmerge-v2
    parameters:
      density: 0.4
      weight: [0.02, 0.115, 0.115, 0.115, 0.115, 0.115]
  - layer_range: [0, 60]
    model: /home/alpha/Models/Raw/migtissera_Tess-M-Creative-v1.0
    parameters:
      density: 0.61
      weight: [0.21, 0.09, 0.09, 0.09, 0.09, 0.09]
  - layer_range: [0, 60]
    model: /home/alpha/Models/Raw/TriadParty_deepmoney-34b-200k-base
    parameters:
      density: 0.61
      weight: [0.04, 0.103, 0.103, 0.103, 0.103, 0.103]
tokenizer_source: union
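
A config like this is executed with mergekit. Below is a minimal sketch using mergekit's Python API, assuming the YAML above is saved as `merge.yml`; the output directory name is arbitrary, and the local `/home/alpha/...` paths must exist on the machine running the merge (or be swapped for Hugging Face repo IDs). The entry points shown (`MergeConfiguration.model_validate`, `run_merge`, `MergeOptions`) match recent mergekit releases, but check your installed version.

```python
# Sketch: run the DARE-TIES merge defined in merge.yml with mergekit.
# "merge.yml" and "./merged-model" are placeholder paths, not part of
# the original config.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse and validate the YAML into mergekit's config schema.
with open("merge.yml", "r", encoding="utf-8") as fp:
    config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    config,
    out_path="./merged-model",  # placeholder output directory
    options=MergeOptions(
        cuda=False,            # set True to merge on GPU
        copy_tokenizer=True,   # emit a tokenizer per tokenizer_source: union
        lazy_unpickle=True,    # avoid loading whole checkpoints into RAM
        low_cpu_memory=False,
    ),
)
```

The CLI equivalent is `mergekit-yaml merge.yml ./merged-model`. Note that where a parameter such as `weight` is given as a list, mergekit interprets it as a gradient interpolated across the layer range, so each donor model's contribution varies by layer rather than being constant.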