{
  "tokenizers": {
    "item": "EntityTokenizer",
    "context": "GPT2TokenizerFast",
    "prompt": "RobertaTokenizerFast"
  },
  "tokenizer_key_for_decoding": "context"
}