conjuring92 committed
Commit ec5d7f9
1 Parent(s): 8a592c9

Training in progress epoch 0

merges.txt ADDED
The diff for this file is too large to render.
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:698c964702f54958f7a406db5a67368daf6f392e696841e2cdc26821d0dad9d4
+ oid sha256:8c3b87b2c67ba99f100d491694fb1a45856af0dbc95c4a19b4a40ba1941ff84c
  size 328727211
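Because pytorch_model.bin is tracked with Git LFS, the repository stores only this pointer file (oid and size); the actual 328 MB checkpoint is fetched on demand. A minimal sketch of resolving the pointer and loading the weights with huggingface_hub, assuming a hypothetical repo id conjuring92/example-model (the real repository name is not shown on this page):

# Minimal sketch: download the LFS-backed checkpoint and load its state dict.
# "conjuring92/example-model" is a placeholder repo id; this commit page
# does not show the actual repository name.
import torch
from huggingface_hub import hf_hub_download

weights_path = hf_hub_download(
    repo_id="conjuring92/example-model",  # hypothetical repo id
    filename="pytorch_model.bin",
    revision="ec5d7f9",  # the commit shown above
)

# torch.load deserializes the state dict stored in pytorch_model.bin.
state_dict = torch.load(weights_path, map_location="cpu")
print(f"{len(state_dict)} tensors loaded")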
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
tokenizer.json ADDED
The diff for this file is too large to render.
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "distilroberta-base", "tokenizer_class": "RobertaTokenizer"}
vocab.json ADDED
The diff for this file is too large to render.