aravind-812 committed on
Commit
6f920a7
1 Parent(s): d51369a

First version of the your-model-name model and tokenizer.

language_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f02da5f1e511f83c3ed01f1fc0820ae3fac82bb7d21245c2b8626890013151f3
+ size 1421605239
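The language_model.bin entry is a Git LFS pointer rather than the weights themselves: it records only the object's SHA-256 and its size in bytes (about 1.4 GB). As a minimal sketch, a downloaded copy can be checked against the pointer like this (the local path is illustrative, not part of the commit):

```python
import hashlib
import os

def verify_lfs_object(path, expected_sha256, expected_size):
    """Compare a downloaded file against the oid/size recorded in its LFS pointer."""
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

ok = verify_lfs_object(
    "language_model.bin",
    "f02da5f1e511f83c3ed01f1fc0820ae3fac82bb7d21245c2b8626890013151f3",
    1421605239,
)
print("matches LFS pointer:", ok)
```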
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
processor_config.json ADDED
@@ -0,0 +1 @@
+ {"baskets": [], "data_dir": "data", "dev_filename": "dev-v2.0.json", "dev_split": 0, "doc_stride": 128, "max_answers": 6, "max_query_length": 64, "max_seq_len": 256, "ph_output_type": "per_token_squad", "proxies": null, "sp_toks_end": 1, "sp_toks_mid": 2, "sp_toks_start": 1, "target": "classification", "tasks": {"question_answering": {"label_list": ["start_token", "end_token"], "metric": "squad", "label_tensor_name": "question_answering_label_ids", "label_name": "question_answering_label", "label_column_name": null, "text_column_name": null, "task_type": null}}, "test_filename": null, "train_filename": "train-v2.0.json", "tokenizer": "RobertaTokenizer", "processor": "SquadProcessor"}
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": false, "model_max_length": 512, "bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>", "special_tokens_map_file": "/root/.cache/torch/transformers/cc43f10372bf085733243a9db0765736e2dc015d48f6e8a96de4be0f6ef6a9fc.16f949018cf247a2ea7465a74ca9a292212875e5fd72f969e0807011e7f192e4", "full_tokenizer_file": null}
vocab.json ADDED
The diff for this file is too large to render. See raw diff
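With vocab.json, merges.txt, special_tokens_map.json, and tokenizer_config.json all added, the byte-level BPE tokenizer can be rebuilt locally. A sketch assuming the repository has been cloned and the files sit in the current directory (the directory path and sample sentences are illustrative):

```python
from transformers import RobertaTokenizer

# Reads vocab.json + merges.txt and applies tokenizer_config.json
# (do_lower_case=False, model_max_length=512) and special_tokens_map.json.
tokenizer = RobertaTokenizer.from_pretrained(".")

encoded = tokenizer("Who wrote the report?", "The report was written in 2019.")
print(tokenizer.convert_ids_to_tokens(encoded["input_ids"]))
```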