Ryu-4x7B-MoE-bf16 / mergekit_moe_config.yml
base_model: fblgit/UNA-TheBeagle-7b-v1
gate_mode: hidden
dtype: bfloat16
experts:
  - source_model: Open-Orca/Mistral-7B-OpenOrca
    positive_prompts:
      - "Fantasy"
      - "Exciting"
      - "Interesting"
      - "Setting"
      - "Landscape"
      - "Fantastic"
      - "Magical"
      - "Storywriting"
      - "Roleplay"
      - "Fictional"
    negative_prompts:
      - "Realistic"
      - "Nonfiction"
      - "Historical"
      - "Fact"
      - "Factual"
  - source_model: samir-fama/FernandoGPT-v1
    positive_prompts:
      - "Math"
      - "Programming"
      - "Engineering"
      - "Science"
      - "Reasoning"
      - "Logical"
      - "logically"
      - "efficient"
      - "accurate"
      - "effective"
      - "intelligent"
    negative_prompts:
      - "inaccurate"
      - "incorrect"
      - "wrong"
      - "nondescript"
      - "stupid"
      - "unintuitive"
      - "vague"
  - source_model: fblgit/UNA-TheBeagle-7b-v1
    positive_prompts:
      - "Discuss"
      - "Chat"
      - "engaging"
      - "stimulating"
      - "intense"
      - "information"
    negative_prompts:
      - "Sorry"
      - "As an AI"
      - "I cannot"
      - "I am not capable"
      - "this request"
  - source_model: Neuronovo/neuronovo-7B-v0.3
    positive_prompts:
      - "pragmatic"
      - "logical"
      - "helpful"
      - "descriptive"
      - "intelligent"
      - "masterful"
      - "precise"
    negative_prompts:
      - "unhelpful"
      - "inaccurate"
      - "vague"
      - "nondescript"
      - "improper"