{ "_attn_implementation": "flash_attention_2", "bos_token_id": 1, "do_sample": true, "eos_token_id": 2, "max_length": 4096, "pad_token_id": 0, "rope_theta": 250000000.0, "temperature": 0.6, "top_p": 0.9, "transformers_version": "4.39.2" }