CadenzaBaron committed
Commit e0c6099 · 1 parent: 663c169

Upload 7 files

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "facebook/m2m100_418M",
+  "_name_or_path": "/notebooks/output/checkpoint-final-10737",
   "activation_dropout": 0.0,
   "activation_function": "relu",
   "architectures": [
@@ -31,7 +31,7 @@
   "pad_token_id": 1,
   "scale_embedding": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.30.2",
+  "transformers_version": "4.21.3",
   "use_cache": false,
   "vocab_size": 128112
-}
+}
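
The config change repoints `_name_or_path` at the local training checkpoint and records that the file was saved under transformers 4.21.3 rather than 4.30.2; the rest of the M2M100 configuration (ReLU activation, scaled embeddings, 128112-token vocabulary) is unchanged. A minimal loading sketch follows; the repo id is a placeholder, since the commit page does not show the repository name:

```python
from transformers import M2M100ForConditionalGeneration

repo_id = "CadenzaBaron/<repo-name>"  # hypothetical repo id, not shown on this page
model = M2M100ForConditionalGeneration.from_pretrained(repo_id)
print(model.config.vocab_size)  # 128112, per the config above
```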
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bf6287788db63af34fd57047464c25e459f094f98a21f6928e527311716aea14
-size 1944201798
+oid sha256:8e109da9f339600d7235271d347ebc788c71ecead246553a8422ca0a84a1e4f9
+size 1935792071
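
The weights are stored as a Git LFS pointer, so the diff shows only the new object hash and byte size. A minimal sketch for checking a downloaded copy against this pointer, assuming the file sits in the current directory:

```python
import hashlib
import os

path = "pytorch_model.bin"  # assumed local path to the downloaded weights

h = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks so the ~1.9 GB file never loads fully into memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

# Size and hash from the new LFS pointer above.
assert os.path.getsize(path) == 1935792071
assert h.hexdigest() == "8e109da9f339600d7235271d347ebc788c71ecead246553a8422ca0a84a1e4f9"
```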
tokenizer_config.json CHANGED
@@ -102,16 +102,17 @@
     "__zu__"
   ],
   "bos_token": "<s>",
-  "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "language_codes": "m2m100",
   "model_max_length": 1024,
+  "name_or_path": "/notebooks/output/checkpoint-final-10737",
   "num_madeup_words": 8,
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "sp_model_kwargs": {},
-  "src_lang": "zh",
-  "tgt_lang": "en",
+  "special_tokens_map_file": "m2m_100_1.2B_v2/special_tokens_map.json",
+  "src_lang": null,
+  "tgt_lang": null,
   "tokenizer_class": "M2M100Tokenizer",
   "tokenizer_file": null,
   "unk_token": "<unk>"
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1b4ee63fcb956cb6cef0772e37f1c4c73a3705e0dae4360a4d2b1837a1c8b3d6
-size 4536
+oid sha256:e218a369adb348c9aa5c6d38be45fcf0b027faa6ca96ab8992872a3232e16787
+size 3439
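
`training_args.bin` is a pickled `TrainingArguments` object, so it can be inspected directly to see the hyperparameters used for this checkpoint. A minimal sketch, assuming a local copy and that `transformers` is importable (the pickle references its classes):

```python
import torch

# weights_only=False is needed on recent torch to unpickle arbitrary objects;
# drop the kwarg on torch versions that predate it.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```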