{
    "max_new_tokens": 200,
    "max_new_tokens_min": 1,
    "max_new_tokens_max": 2000,
    "seed": -1,
    "name1": "YOU",
    "name2": "Assistant",
    "context": "This is a conversation with your Assistant. The Assistant is very helpful and is eager to chat with you and answer your questions.",
    "greeting": "",
    "end_of_turn": "",
    "custom_stopping_strings": " \"\\n\", \"n n YOUR RESPONSE: \", \"}//end of the scenarios, \", \"Comment: \", \"{{char}}: \",  \"{{user}}: \",  \"Scenario: \",  \"Persona: \",  \"Greeting: \" ",
    "stop_at_newline": true,
    "add_bos_token": true,
    "ban_eos_token": false,
    "skip_special_tokens": true,
    "truncation_length": 2048,
    "truncation_length_min": 0,
    "truncation_length_max": 8192,
    "mode": "cai-chat",
    "instruction_template": "None",
    "chat_prompt_size": 2048,
    "chat_prompt_size_min": 0,
    "chat_prompt_size_max": 2048,
    "chat_generation_attempts": 4,
    "chat_generation_attempts_min": 1,
    "chat_generation_attempts_max": 8,
    "default_extensions": [],
    "chat_default_extensions": [
        "gallery"
    ],
    "presets": {
        "default": "GPT4",
        ".*(alpaca|llama)": "GPT4",
        ".*pygmalion": "GPT4",
        ".*RWKV": "Naive"
    },
    "prompts": {
        "default": "QA",
        ".*(gpt4chan|gpt-4chan|4chan)": "GPT-4chan",
        ".*oasst": "Open Assistant",
        ".*alpaca": "Alpaca"
    },
    "lora_prompts": {
        "default": "QA",
        ".*(alpaca-lora-7b|alpaca-lora-13b|alpaca-lora-30b)": "Alpaca"
    },
    "google_translate-language string": "English",
    "default": "GPT4",
    ".*(alpaca|llama)": "GPT4"
}
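
A minimal sketch (not part of the settings file itself) of how these values can be read with Python's standard library; the filename "settings.json" and the clamping step are assumptions for illustration, not the project's exact loading code:

import json

# Load the settings file shown above (the path is an assumption).
with open("settings.json", encoding="utf-8") as f:
    settings = json.load(f)

# Clamp max_new_tokens into the declared slider bounds before use.
max_new_tokens = min(
    max(settings["max_new_tokens"], settings["max_new_tokens_min"]),
    settings["max_new_tokens_max"],
)

print(max_new_tokens, settings["mode"], settings["presets"]["default"])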