# phixtral-2x2_8 / mergekit_moe_config.yml
base_model: cognitivecomputations/dolphin-2_6-phi-2  # supplies the shared (non-expert) weights
gate_mode: cheap_embed  # initialize router gates from raw token embeddings of the positive prompts
experts:
  - source_model: cognitivecomputations/dolphin-2_6-phi-2
    positive_prompts: [""]  # empty prompt: no domain-specific routing hint for this expert
  - source_model: lxuechen/phi-2-dpo
    positive_prompts: [""]
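
# A minimal usage sketch, assuming mergekit is installed (e.g. `pip install mergekit`)
# and run from the directory containing this file; the output path is illustrative,
# not part of this config:
#
#   mergekit-moe mergekit_moe_config.yml ./phixtral-2x2_8
#
# This builds a 2-expert MoE from the two phi-2 fine-tunes listed above, taking
# attention and embedding weights from base_model and MLP weights from each expert.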