Fernando Carneiro committed
Commit 67110d0
Parent: 5af2a4f

Fix model from pretraining

added_tokens.json ADDED
@@ -0,0 +1 @@
+{"<mask>": 64004}
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "C:\\Users\\y435\\temp\\models\\bertweetbr\\7",
+  "_name_or_path": "C:\\\\Users\\\\y435\\\\.cache\\bertweetbr\\checkpoint-4650000",
   "architectures": [
     "RobertaForMaskedLM"
   ],
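
The only change here is the bookkeeping field `_name_or_path`, which records where the checkpoint was last loaded from; transformers overwrites it with whatever identifier you pass at load time, so the local Windows path never reaches downstream users. A quick check (model id taken from the tokenizer config below):

    from transformers import AutoConfig

    cfg = AutoConfig.from_pretrained("melll-uff/bertweetbr")
    print(cfg.architectures)    # ["RobertaForMaskedLM"], matching the diff
    print(cfg._name_or_path)    # set to the id passed above, not the local path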
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2ab894c64a4266806906f446980948e9894e0e3ee084da86fa5ad3225e67c69c
+oid sha256:3b4d1fd54041dd34b988eb37d2b200a55f23dd671315c0c1868c012dc86b2e46
 size 539940395
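
The weights live in Git LFS, so the diff only swaps the pointer's SHA-256 digest; the file size is unchanged at 539,940,395 bytes. A small sketch (the local path is an assumption about where the file was downloaded) to verify a copy against the new oid:

    import hashlib

    def sha256_of(path, chunk_size=1 << 20):
        # Stream the file so the ~540 MB binary never sits fully in memory.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                h.update(chunk)
        return h.hexdigest()

    # Digest from the new LFS pointer above.
    expected = "3b4d1fd54041dd34b988eb37d2b200a55f23dd671315c0c1868c012dc86b2e46"
    assert sha256_of("pytorch_model.bin") == expected, "weights do not match the pointer"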
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"normalization": false, "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "max_len": 128, "tokenizer_class": "BertweetTokenizer"}
+{"normalization": false, "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "max_len": 128, "special_tokens_map_file": "C:\\Users\\y435\\.cache\\huggingface\\transformers\\d3ac3efbfcf6ec1d20748abc90e96ca8452f1fc27cdbf11c887a5bbf50c26cd6.0dc5b1041f62041ebbd23b1297f2f573769d5c97d8b7c28180ec86b8f6185aa8", "tokenizer_file": null, "name_or_path": "melll-uff/bertweetbr", "tokenizer_class": "BertweetTokenizer"}