# aegolius-acadicus-v1-30b / mergekit_moe_config.yml
# Author: ibivibiv — commit 59b0524 (verified), 877 bytes
---
# mergekit Mixture-of-Experts merge configuration.
# Each expert below is routed to via gates derived from its
# positive/negative prompt lists.

# Model supplying the shared (non-expert) weights of the merged MoE.
base_model: senseable/WestLake-7B-v2

# "hidden": router gates are computed from hidden-state representations
# of the prompts (see mergekit MoE docs).
gate_mode: hidden

experts:
  # NOTE(review): "Westlake-7B" differs in capitalization from the
  # "WestLake-7B-v2" base model above — confirm this is the intended
  # Hugging Face repo id.
  - source_model: senseable/Westlake-7B
    positive_prompts:
      - "logical reasoning"
    negative_prompts:
      - "commonsense reasoning"

  - source_model: senseable/WestLake-7B-v2
    positive_prompts:
      - "commonsense reasoning"
      - "ambiguity resolution"
    negative_prompts:
      - "logical reasoning"
      - "scientific knowledge"

  - source_model: andysalerno/openchat-nectar-0.5
    positive_prompts:
      - "multidisciplinary knowledge"
    negative_prompts:
      - "natural language understanding"

  - source_model: FelixChao/WestSeverus-7B-DPO-v2
    positive_prompts:
      - "fact-checking"
    negative_prompts:
      - "logical reasoning"

  - source_model: PetroGPT/WestSeverus-7B-DPO
    positive_prompts:
      - "mathematical reasoning"
    negative_prompts:
      - "natural language understanding"