models:
  - model: failspy/Llama-3-8B-Instruct-MopeyMule
  - model: flammenai/Mahou-1.0-llama3-8B # 7/10
    parameters:
      density: 0.4
      weight: 0.14
  - model: TheSkullery/llama-3-cat-8b-instruct-v1 # 6/10
    parameters:
      density: 0.3
      weight: 0.1
  - model: Nitral-AI/Poppy_Porpoise-1.0-L3-8B # 7/10
    parameters:
      density: 0.5
      weight: 0.18
  - model: openlynn/Llama-3-Soliloquy-8B-v2 # 8/10
    parameters:
      density: 0.5
      weight: 0.18
  - model: Hastagaras/UltimateANJIR-8B-L3-Blackroot # 6/10
    parameters:
      density: 0.3
      weight: 0.1
  - model: NeverSleep/Llama-3-Lumimaid-8B-v0.1-OAS # 7/10
    parameters:
      density: 0.4
      weight: 0.14
  - model: Sao10K/L3-8B-Stheno-v3.1 # 9/10
    parameters:
      density: 0.6
      weight: 0.23
merge_method: breadcrumbs_ties
base_model: failspy/Llama-3-8B-Instruct-MopeyMule
parameters:
  normalize: false
  rescale: true
  gamma: 0.01
dtype: float16
name: Peter
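A minimal sketch of how a config like this can be run through mergekit's Python API, following the pattern in mergekit's README. The file name `peter.yml` and the output directory `./Peter` are placeholders, assuming the YAML above is saved on its own rather than as part of a larger multi-merge file.

```python
# Sketch: run the breadcrumbs_ties merge defined above with mergekit.
# Assumes mergekit, torch, and pyyaml are installed, and that the YAML
# config above has been saved as "peter.yml" (placeholder file name).
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("peter.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    "./Peter",  # placeholder output directory for the merged model
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

The same config can also be run from the command line with `mergekit-yaml peter.yml ./Peter --cuda`.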