# jaLLAbi2-7b — mergekit_config.yml
# Source: solankibhargav/jaLLAbi2-7b on Hugging Face Hub
# Uploaded via huggingface_hub (commit e51c2fd, verified), 693 bytes
---
# DARE-TIES merge: four weighted contributor models merged onto a base
# model. Contributor weights sum to 1.0 (0.2 + 0.2 + 0.3 + 0.3); the
# base model entry carries no weight/density parameters.
models:
  - model: eren23/ogno-monarch-jaskier-merge-7b
    # No parameters necessary for base model
  - model: FelixChao/WestSeverus-7B-DPO-v2
    # Emphasize the beginning of Vicuna format models
    parameters:
      weight: 0.2
      density: 0.59
  - model: bardsai/jaskier-7b-dpo-v5.6
    parameters:
      weight: 0.2
      density: 0.55
    # Vicuna format
  - model: AbacusResearch/haLLAwa3
    parameters:
      weight: 0.3
      density: 0.55
  - model: cognitivecomputations/WestLake-7B-v2-laser
    parameters:
      weight: 0.3
      density: 0.55
merge_method: dare_ties
base_model: eren23/ogno-monarch-jaskier-merge-7b
parameters:
  # NOTE(review): int8 masking is a mergekit dare_ties option — confirm
  # against the mergekit version in use.
  int8_mask: true
dtype: bfloat16
random_seed: 0