Akajackson committed
Commit: b8d0f97
Parent: 9385f42

Training in progress, epoch 0

added_tokens.json CHANGED
@@ -1,3 +1,3 @@
 {
-  "<s_donut_rus>": 62518
+  "<s_500k>": 35054
 }
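This commit swaps the Donut task-start token from `<s_donut_rus>` (id 62518) to `<s_500k>` (id 35054), matching the new tokenizer vocabulary introduced below. A minimal verification sketch, assuming a `transformers` install; the checkpoint path is a placeholder, not the actual repo id:

```python
from transformers import XLMRobertaTokenizer

# Placeholder path; point this at the checkpoint this commit belongs to.
tokenizer = XLMRobertaTokenizer.from_pretrained("path/to/checkpoint")

# The added task-start token should resolve to the id in added_tokens.json.
print(tokenizer.convert_tokens_to_ids("<s_500k>"))  # expected: 35054
```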
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ef009ae2ede7beded780cfa82e11d2d977473cb6ddf9c3b5bdce3c367497fc01
+size 943862
special_tokens_map.json CHANGED
@@ -1,5 +1,15 @@
 {
+  "bos_token": "<s>",
+  "cls_token": "<s>",
   "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
   "pad_token": "<pad>",
+  "sep_token": "</s>",
   "unk_token": "<unk>"
 }
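The rewritten map declares the full XLM-RoBERTa special-token set (`bos`/`cls`/`sep`/`mask` in addition to the previous `eos`/`pad`/`unk`), with `<mask>` serialized as an `AddedToken` so its stripping and normalization flags are preserved. A sketch of the equivalent in-memory object, using the `AddedToken` class that `transformers` re-exports:

```python
from transformers import AddedToken

# Mirrors the serialized mask_token entry above; lstrip=True lets <mask>
# absorb the preceding space, as XLM-RoBERTa conventionally expects.
mask_token = AddedToken(
    "<mask>", lstrip=True, normalized=True, rstrip=False, single_word=False
)
print(mask_token.lstrip, mask_token.rstrip)  # True False
```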
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,13 +1,21 @@
 {
+  "bos_token": "<s>",
+  "cls_token": "<s>",
   "eos_token": "</s>",
-  "model_max_length": 512,
+  "mask_token": {
+    "__type": "AddedToken",
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",
   "processor_class": "DonutProcessor",
-  "separate_vocabs": false,
-  "source_lang": "ru",
+  "sep_token": "</s>",
   "sp_model_kwargs": {},
-  "special_tokens_map_file": null,
-  "target_lang": "en",
-  "tokenizer_class": "MarianTokenizer",
+  "special_tokens_map_file": "/root/.cache/huggingface/transformers/6ea33bfad55f516eeb1e52bb6e3f5c198c976c6ebfaa7e1fb9f56b33b2ddc8a9.f6ac38e94db1fb749ad0f52af2f7cd5b7863b9b30f1aa680a1b0d22ec09b717d",
+  "tokenizer_class": "XLMRobertaTokenizer",
   "unk_token": "<unk>"
 }
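Net effect: the tokenizer class flips from `MarianTokenizer` (with its ru→en translation fields `source_lang`/`target_lang`/`separate_vocabs`) to `XLMRobertaTokenizer`, backed by the new `sentencepiece.bpe.model`, and `model_max_length` falls back to the `transformers` sentinel `VERY_LARGE_INTEGER` (1000000000000000019884624838656, i.e. `int(1e30)`) since no explicit limit is set. A hedged loading sketch; the checkpoint path is again a placeholder:

```python
from transformers import DonutProcessor

# DonutProcessor pairs the image processor with the tokenizer class named
# in tokenizer_config.json (now XLM-RoBERTa based).
processor = DonutProcessor.from_pretrained("path/to/checkpoint")

print(type(processor.tokenizer).__name__)    # an XLM-RoBERTa tokenizer class
print(processor.tokenizer.model_max_length)  # 1000000000000000019884624838656
```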