Llama3-8B-MetaMath / special_tokens_map.json
{
  "bos_token": {
    "content": "<|begin_of_text|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|end_of_text|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
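
For reference, a minimal sketch of how this special-token map surfaces on a loaded tokenizer, assuming the Hugging Face `transformers` library and the repo id `ntnhan/Llama3-8B-MetaMath` (the repo id is an assumption based on the file path above):

```python
# Minimal sketch: inspect the special tokens defined in special_tokens_map.json.
# Assumes the `transformers` library; the repo id below is an assumption.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("ntnhan/Llama3-8B-MetaMath")

# The bos/eos tokens come from this file's "content" fields.
print(tokenizer.bos_token)  # <|begin_of_text|>
print(tokenizer.eos_token)  # <|end_of_text|>

# With add_special_tokens=True, the BOS token id is prepended to the encoded input.
ids = tokenizer("2 + 2 = 4", add_special_tokens=True)["input_ids"]
print(ids[0] == tokenizer.bos_token_id)  # expected: True
```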