# mergekit configuration: TIES merge of three 7B models onto
# LeroyDyer/Mixtral_AI_Cyber_3.1_SFT.
# Per-model `density`/`weight` may be a scalar, a per-layer-group list
# ("gradient"), or a list of filter-scoped values.
models:
  - model: liminerity/M7-7b
    parameters:
      density: [0.87, 0.721, 0.451] # density gradient
      weight: 0.876
  - model: chihoonlee10/T3Q-Mistral-Orca-Math-DPO
    parameters:
      density: 0.232
      weight: [0.36, 0.3, 0.437, 0.76] # weight gradient
  - model: yam-peleg/Experiment26-7B
    parameters:
      density: 0.475
      weight:
        - filter: mlp # applies only to MLP tensors
          value: 0.5
        - value: 0 # weight for all remaining tensors
merge_method: ties
base_model: LeroyDyer/Mixtral_AI_Cyber_3.1_SFT
parameters:
  normalize: true
  int8_mask: true
# NOTE(review): `dtype` placed at top level per mergekit's schema (it is not a
# merge-method parameter); the flattened source did not show its original
# nesting — confirm against the original config.
dtype: float16