meta-llama-Llama-2-7b-chat-hf-8bit-lmhead-embedtoken-2M-v3-merged-test-4bit-ift / generation_config.json
{
  "bos_token_id": 1,
  "do_sample": true,
  "eos_token_id": 2,
  "max_length": 4096,
  "pad_token_id": 0,
  "temperature": 0.6,
  "top_p": 0.9,
  "transformers_version": "4.44.2"
}
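
These are the default decoding settings that the `transformers` library applies when generating with this checkpoint: sampling is enabled with temperature 0.6 and nucleus (top-p) 0.9, capped at 4096 tokens. As a rough sketch (not part of the repository), the same defaults can be reconstructed in code and passed to `model.generate` explicitly; the `"path/to/checkpoint"` string below is a placeholder for wherever this model actually lives.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

# Values copied from generation_config.json above.
gen_config = GenerationConfig(
    bos_token_id=1,
    eos_token_id=2,
    pad_token_id=0,
    do_sample=True,    # sample instead of greedy decoding
    temperature=0.6,   # soften the token distribution before sampling
    top_p=0.9,         # nucleus sampling: keep the smallest set of tokens covering 90% probability mass
    max_length=4096,   # cap on prompt + generated tokens
)

# "path/to/checkpoint" is a placeholder for the local or Hub location of this model.
tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")
model = AutoModelForCausalLM.from_pretrained("path/to/checkpoint")

inputs = tokenizer("Hello, how are you?", return_tensors="pt")
outputs = model.generate(**inputs, generation_config=gen_config)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

When no `generation_config` argument is given, `generate` falls back to the values stored in this file, so the explicit construction above should produce equivalent behavior.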