kobart_chatbot_social_media-e10_2 / tokenizer_config.json
{
  "model_input_names": [
    "input_ids",
    "attention_mask"
  ],
  "model_max_length": 1000000000000000019884624838656,
  "name_or_path": "final-project-level3-nlp-13/saved_models/hyunwoongko/kobart/chatbot_social_media_10epoch_01-21-22-43",
  "special_tokens_map_file": "/root/.cache/huggingface/transformers/a87d2ed77831bb40ce806a97c04126addf5ecc82b3e23ecf916b2a4acdb9c29a.c23d5e62137984cf842a885705037b25b156747d145406702932d5f5d5e7c88e",
  "tokenizer_class": "PreTrainedTokenizerFast"
}
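
Two notes on this config. The `model_max_length` value 1000000000000000019884624838656 is `int(1e30)`, the sentinel (`VERY_LARGE_INTEGER`) that transformers writes when no maximum input length was recorded for the checkpoint, so callers should set a real limit themselves. The `model_input_names` field declares which tensors the tokenizer emits for the model. Below is a minimal sketch of loading and using this tokenizer; the Hub repo id is an assumption inferred from the page, so substitute the actual path if it differs.

```python
from transformers import AutoTokenizer

# Assumed repo id (uploader + repo name from the page); adjust if needed.
tokenizer = AutoTokenizer.from_pretrained(
    "JeongJunNyeong/kobart_chatbot_social_media-e10_2"
)

# model_input_names determines the keys in the encoding below.
enc = tokenizer(
    "안녕하세요",          # "Hello" in Korean, matching the chatbot's domain
    truncation=True,
    max_length=512,       # explicit cap, since model_max_length is the 1e30 sentinel
    return_tensors="pt",
)
print(enc.keys())  # expected: dict_keys(['input_ids', 'attention_mask'])
```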