{
  "_attn_implementation": "flash_attention_2",
  "bos_token_id": 1,
  "do_sample": true,
  "eos_token_id": 2,
  "max_length": 4096,
  "pad_token_id": 0,
  "rope_theta": 250000000.0,
  "temperature": 0.6,
  "top_p": 0.9,
  "transformers_version": "4.39.2"
}