models:
  - model: Q-bert/MetaMath-Cybertron-Starling
    parameters:
      weight: 0.3
      density: 0.53
  - model: ozayezerceli/BetterSaul-7B-slerp
    parameters:
      weight: 0.2
      density: 0.53
  - model: chihoonlee10/T3Q-Mistral-Orca-Math-DPO
    parameters:
      weight: 0.4
      density: 0.53
  - model: EmbeddedLLM/Mistral-7B-Merge-14-v0.2
    parameters:
      weight: 0.1
      density: 0.53
base_model: Gille/StrangeMerges_44-7B-dare_ties
merge_method: dare_ties
dtype: bfloat16
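
A minimal inference sketch, assuming the merge above has been built with mergekit and the resulting weights published to a Hub repository; the model ID below is a placeholder, not an actual published checkpoint.

```python
# Minimal usage sketch with 🤗 Transformers.
# "your-username/merged-model" is a placeholder for the repo where the
# merged weights produced from the configuration above are hosted.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-username/merged-model"  # hypothetical repository name

tokenizer = AutoTokenizer.from_pretrained(model_id)
# Load in bfloat16 to match the dtype declared in the merge configuration.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)

prompt = "What is 17 * 23?"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```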