# laserxtral-exl2 / mergekit_moe_config.yml
# bartowski's picture
# Quant for 3.5
# 2f17940 verified
# raw / history / blame
# 881 Bytes
---
# mergekit-moe configuration: build a 4-expert MoE on top of the slerp base,
# routing by hidden-state similarity to each expert's positive prompts.
base_model: mlabonne/Marcoro14-7B-slerp
gate_mode: hidden  # one of "hidden", "cheap_embed", or "random"
dtype: bfloat16  # output dtype (float32, float16, or bfloat16)

experts:
  # Expert 1 — general chat / instruction following
  - source_model: ../laserRMT/laser_model_dolphindpo_new2
    positive_prompts:
      - "chat"
      - "assistant"
      - "tell me"
      - "explain"

  # Expert 2 — coding
  - source_model: ../laserRMT/laser_model_codeninja_new
    positive_prompts:
      - "code"
      - "python"
      - "javascript"
      - "programming"
      - "algorithm"

  # Expert 3 — creative writing
  - source_model: ../laserRMT/laser_model_qbert_new
    positive_prompts:
      - "storywriting"
      - "write"
      - "scene"
      - "story"
      - "character"

  # Expert 4 — reasoning and math
  - source_model: ../laserRMT/laser_model_wizardm
    positive_prompts:
      - "reason"
      - "math"
      - "mathematics"
      - "solve"
      - "count"

# To run this:
#   mergekit-moe --trust-remote-code --random-seed 0 ./moe_laser.yml ./moe_laser