{
  "attn_implementation": "flash_attention_2",
  "bos_token_id": 151643,
  "do_sample": true,
  "eos_token_id": [
    151645,
    151643
  ],
  "num_labels": 5,
  "pad_token_id": 151643,
  "problem_type": "regression",
  "temperature": 0.01,
  "top_k": 1,
  "top_p": 0.001,
  "transformers_version": "4.46.2"
}