Update generation_config.json
generation_config.json CHANGED (+9 -1)
@@ -3,5 +3,13 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 0,
-  "transformers_version": "4.35.2"
+  "transformers_version": "4.35.2",
+
+  "max_length": 512, // Limits the length of the output text
+  "temperature": 0.7, // Controls randomness, higher means more random
+  "top_p": 0.9, // Nucleus sampling, considers only top_p highest probability tokens
+  "top_k": 50, // Limits the sample space to top_k tokens with highest probabilities
+  "do_sample": true, // Enables sampling instead of greedy decoding
+  "repetition_penalty": 1.2, // Penalizes repetition
+  "num_return_sequences": 1 // Number of output sequences to generate
 }
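For reference, below is a minimal sketch of how the settings added in this commit map onto the Transformers generation API. It assumes the inline // comments are stripped so the file parses as valid JSON, and the model ID is a placeholder, not the repository this commit belongs to.

from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

# Placeholder model ID; substitute the repository that ships this generation_config.json.
model_id = "your-org/your-model"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Mirrors the values added to generation_config.json in this commit.
gen_config = GenerationConfig(
    bos_token_id=1,
    eos_token_id=2,
    pad_token_id=0,
    max_length=512,          # limits the total length of the generated output
    temperature=0.7,         # higher values give more random output
    top_p=0.9,               # nucleus sampling over the top-p probability mass
    top_k=50,                # restrict sampling to the 50 most likely tokens
    do_sample=True,          # sample instead of greedy decoding
    repetition_penalty=1.2,  # penalize repeated tokens
    num_return_sequences=1,  # number of sequences to return
)

inputs = tokenizer("Hello, world!", return_tensors="pt")
outputs = model.generate(**inputs, generation_config=gen_config)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

When the config file itself is valid JSON, the same values are picked up automatically via GenerationConfig.from_pretrained(model_id), so the explicit constructor above is only needed to override or experiment with them.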