{
  "bos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": "<|endoftext|>",
  "pad_token": "<|endoftext|>",
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}