# DarkSapling-7B-v2.0 / mergekit-config.yml
# Author: TeeZee — commit 8b398b9 ("Update mergekit-config.yml")
# mergekit DARE-TIES merge configuration.
# Restored nesting: the pasted source had all indentation stripped, which
# flattened the per-model `parameters:` blocks to top level (duplicate keys,
# invalid YAML). Structure below follows the mergekit config schema.
models:
  # Base model — also listed under `base_model` below; in dare_ties the base
  # carries no explicit weight/density.
  - model: "../cognitivecomputations_samantha-mistral-7b"
  - model: "../cognitivecomputations_dolphin-2.6-mistral-7b-dpo-laser"
    parameters:
      weight: 0.19
      density: 0.5
  - model: "../KoboldAI_Mistral-7B-Holodeck-1"
    parameters:
      weight: 0.3
      density: 1.0
  - model: "../KoboldAI_Mistral-7B-Erebus-v3"
    parameters:
      weight: 0.09
      density: 1.0
merge_method: dare_ties
tokenizer_source: union
base_model: "../cognitivecomputations_samantha-mistral-7b"
parameters:
  # Use int8 masking during the merge (mergekit global parameter).
  int8_mask: true
dtype: bfloat16
# Output label as given in the original config (kept verbatim; note the
# spelling "dire_ties" — presumably intentional, verify with the author).
name: darksapling12_dire_ties