base_model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo-laser
gate_mode: hidden # one of "hidden", "cheap_embed", or "random"
dtype: bfloat16 # output dtype (float32, float16, or bfloat16)
experts:
  - source_model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo-laser
    positive_prompts:
    - "chat"
    - "assistant"
    - "tell me"
    - "explain"
    - "code"
    - "programming"
  - source_model: LunaticPython161/CyberWitch-7B
    positive_prompts:
    - "solve"
    - "count"
    - "math"
    - "mathematics"
    - "algorithm"
    - "cypher"
    - "cybersecurity"
    - "penetration testing"
    - "red team"
    - "blue team"
    - "hacking"