sgugger committed
Commit: 41c9a96
Parent: aa7de73

random model

README.md CHANGED
@@ -51,10 +51,6 @@ The following hyperparameters were used during training:
 - lr_scheduler_type: linear
 - num_epochs: 1
 
-### Training results
-
-
-
 ### Framework versions
 
 - Transformers 4.10.0.dev0
config.json CHANGED
@@ -23,5 +23,5 @@
   "transformers_version": "4.10.0.dev0",
   "type_vocab_size": 1,
   "use_cache": true,
-  "vocab_size": 12000
+  "vocab_size": 52000
 }
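The substantive change here is vocab_size going from 12000 to 52000. A quick sanity check that the checkpoint's embedding matrix agrees with the updated config (a minimal sketch; the repo id "esberto-small" is taken from tokenizer_config.json below, and the exact path is an assumption):

```python
from transformers import AutoConfig, AutoModelForMaskedLM

repo = "esberto-small"  # hypothetical path -- point this at the actual repo

config = AutoConfig.from_pretrained(repo)
model = AutoModelForMaskedLM.from_pretrained(repo)

# The input embedding matrix is (vocab_size, hidden_size);
# its first dimension should read 52000 after this commit.
emb = model.get_input_embeddings().weight
assert emb.shape[0] == config.vocab_size
print(tuple(emb.shape))
```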
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2e6f078adc7b33e532883c54dc53ee965651b6682f8f5254f4394eeadef79b32
-size 211020082
+oid sha256:4b7108ad8792e0fcac484f8e45a18c3c9ba7d3484aea9cfea1da0e8e834b23e4
+size 334055467
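The roughly 123 MB jump in checkpoint size is what the 40000-token vocabulary increase predicts. Assuming fp32 weights and the standard roberta-base hidden size of 768 (the hidden size is not visible in this diff), the word-embedding matrix alone grows by about 122.9 MB:

```python
# Back-of-the-envelope check on the checkpoint growth (fp32; hidden
# size 768 assumed -- the standard roberta-base value, not shown here).
old_emb = 12_000 * 768 * 4    # 36,864,000 bytes
new_emb = 52_000 * 768 * 4    # 159,744,000 bytes
print(new_emb - old_emb)      # 122,880,000 bytes

# Observed difference between the two LFS pointers:
print(334_055_467 - 211_020_082)  # 123,035,385 bytes
```

The small remainder is consistent with other vocabulary-sized tensors (such as the LM head bias) plus serialization overhead.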
tokenizer.json CHANGED
The diff for this file is too large to render.
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false, "__type": "AddedToken"}, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "roberta-base", "tokenizer_class": "RobertaTokenizer"}
+{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false, "__type": "AddedToken"}, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "esberto-small", "tokenizer_class": "RobertaTokenizer"}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:582400e615541b1ff4b8414f2dd57a8b159307c1bbbcdc1f04c9fe365ad856f8
+oid sha256:8028607595fca8bcbfcca8e8d4d9c5ef3db9d4fe840d18c95fc3a6c3e0b94f5b
 size 2607