{
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "max_new_tokens": 1024,
  "min_new_tokens": 2,
  "pad_token_id": 2,
  "temperature": null,
  "top_k": null,
  "top_p": null,
  "transformers_version": "4.40.2"
}
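
A minimal sketch of how a generation_config.json like the one above is typically consumed with the transformers library. The repository id "your-org/your-model" is a placeholder, not the actual repo name.

# Assumption: the model is a causal LM hosted at a hypothetical repo id.
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

repo_id = "your-org/your-model"  # hypothetical, replace with the real repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)

# Loads generation_config.json from the repo; its fields (bos/eos/pad token ids,
# max_new_tokens=1024, min_new_tokens=2) become the defaults for generate().
# temperature/top_k/top_p are null here, so no sampling parameters are set.
gen_config = GenerationConfig.from_pretrained(repo_id)

inputs = tokenizer("Hello", return_tensors="pt")
outputs = model.generate(**inputs, generation_config=gen_config)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))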