meiiny00 committed

Commit: 2afa447
Parent: 51f21ab

Upload tokenizer

Files changed (4)
  1. README.md +3 -3
  2. added_tokens.json +2 -2
  3. tokenizer_config.json +4 -5
  4. vocab.json +73 -48
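
To inspect the files exactly as uploaded here, the download can be pinned to this commit. A minimal sketch with huggingface_hub, assuming the repo id meiiny00/w2v-bert-2.0-15red (taken from the model-index name in README.md):

```python
import json

from huggingface_hub import hf_hub_download

# Repo id is an assumption from the model-index name; the revision
# pins this commit (use the full 40-char hash if the short one
# does not resolve).
path = hf_hub_download(
    repo_id="meiiny00/w2v-bert-2.0-15red",
    filename="vocab.json",
    revision="2afa447",
)
with open(path, encoding="utf-8") as f:
    vocab = json.load(f)
print(len(vocab))  # 74 entries after this commit; <s>/</s> live in added_tokens.json
```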
README.md CHANGED
@@ -1,11 +1,11 @@
 ---
+base_model: facebook/w2v-bert-2.0
 library_name: transformers
 license: mit
-base_model: facebook/w2v-bert-2.0
-tags:
-- generated_from_trainer
 metrics:
 - wer
+tags:
+- generated_from_trainer
 model-index:
 - name: w2v-bert-2.0-15red
   results: []
added_tokens.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "</s>": 50,
-  "<s>": 49
+  "</s>": 75,
+  "<s>": 74
 }
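
The jump from 49/50 to 74/75 tracks the enlarged vocabulary below: both sentinels are appended directly after the last vocab.json id (73). A minimal check, under the same repo-id assumption:

```python
from transformers import Wav2Vec2CTCTokenizer

tok = Wav2Vec2CTCTokenizer.from_pretrained(
    "meiiny00/w2v-bert-2.0-15red",  # assumed repo id
    revision="2afa447",
)

assert tok.convert_tokens_to_ids("<s>") == 74
assert tok.convert_tokens_to_ids("</s>") == 75
print(len(tok.get_vocab()))  # 76: 74 vocab entries plus the two added tokens
```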
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "added_tokens_decoder": {
-    "47": {
+    "72": {
       "content": "[UNK]",
       "lstrip": true,
       "normalized": false,
@@ -8,7 +8,7 @@
       "single_word": false,
       "special": false
     },
-    "48": {
+    "73": {
       "content": "[PAD]",
       "lstrip": true,
       "normalized": false,
@@ -16,7 +16,7 @@
       "single_word": false,
       "special": false
     },
-    "49": {
+    "74": {
       "content": "<s>",
       "lstrip": false,
       "normalized": false,
@@ -24,7 +24,7 @@
       "single_word": false,
       "special": true
     },
-    "50": {
+    "75": {
       "content": "</s>",
       "lstrip": false,
       "normalized": false,
@@ -39,7 +39,6 @@
   "eos_token": "</s>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",
-  "processor_class": "Wav2Vec2BertProcessor",
   "replace_word_delimiter_char": " ",
   "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
vocab.json CHANGED
@@ -1,51 +1,76 @@
 {
-  "0": 1,
-  "1": 2,
-  "2": 3,
-  "3": 4,
-  "4": 5,
-  "5": 6,
-  "6": 7,
-  "7": 8,
-  "8": 9,
-  "9": 10,
-  "[PAD]": 48,
-  "[UNK]": 47,
+  "#": 1,
+  "*": 2,
+  "/": 3,
+  "0": 4,
+  "1": 5,
+  "2": 6,
+  "3": 7,
+  "4": 8,
+  "5": 9,
+  "6": 10,
+  "7": 11,
+  "8": 12,
+  "9": 13,
+  "@": 14,
+  "[PAD]": 73,
+  "[UNK]": 72,
+  "_": 15,
+  "a": 16,
+  "b": 17,
+  "c": 18,
+  "d": 19,
+  "e": 20,
+  "g": 21,
+  "h": 22,
+  "i": 23,
+  "k": 24,
+  "l": 25,
+  "m": 26,
+  "n": 27,
+  "o": 28,
+  "p": 29,
+  "q": 30,
+  "r": 31,
+  "s": 32,
+  "t": 33,
+  "u": 34,
+  "v": 35,
+  "x": 36,
+  "y": 37,
   "|": 0,
-  "а": 11,
-  "б": 12,
-  "в": 13,
-  "г": 14,
-  "д": 15,
-  "е": 16,
-  "ж": 17,
-  "з": 18,
-  "и": 19,
-  "й": 20,
-  "к": 21,
-  "л": 22,
-  "м": 23,
-  "н": 24,
-  "о": 25,
-  "п": 26,
-  "р": 27,
-  "с": 28,
-  "т": 29,
-  "у": 30,
-  "ф": 31,
-  "х": 32,
-  "ц": 33,
-  "ч": 34,
-  "ш": 35,
-  "щ": 36,
-  "ъ": 37,
-  "ы": 38,
-  "ь": 39,
-  "э": 40,
-  "ю": 41,
-  "я": 42,
-  "ё": 43,
-  "ѳ": 44,
-  "ү": 45,
-  "ө": 46
+  "а": 38,
+  "б": 39,
+  "в": 40,
+  "г": 41,
+  "д": 42,
+  "е": 43,
+  "ж": 44,
+  "з": 45,
+  "и": 46,
+  "й": 47,
+  "к": 48,
+  "л": 49,
+  "м": 50,
+  "н": 51,
+  "о": 52,
+  "п": 53,
+  "р": 54,
+  "с": 55,
+  "т": 56,
+  "у": 57,
+  "ф": 58,
+  "х": 59,
+  "ц": 60,
+  "ч": 61,
+  "ш": 62,
+  "ъ": 63,
+  "ы": 64,
+  "ь": 65,
+  "э": 66,
+  "ю": 67,
+  "я": 68,
+  "ё": 69,
+  "ү": 70,
+  "ө": 71
 }
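
The character inventory grows from 49 to 74 entries: a Latin block (a–y, minus f, j, w and z) and the symbols #, *, /, @ and _ are inserted, the digits shift from ids 1–10 to 4–13, the Cyrillic block shifts from 11–46 to 38–71 (щ and ѳ are dropped), and [UNK]/[PAD] move from 47/48 to 72/73. Only the word delimiter | keeps its id. A quick sanity check of the new mapping, under the same repo-id assumption:

```python
from transformers import Wav2Vec2CTCTokenizer

tok = Wav2Vec2CTCTokenizer.from_pretrained(
    "meiiny00/w2v-bert-2.0-15red", revision="2afa447"  # assumed repo id
)

vocab = tok.get_vocab()
assert vocab["|"] == 0         # word delimiter keeps id 0
assert vocab["а"] == 38        # Cyrillic shifted by the new Latin/symbol block
assert tok.pad_token_id == 73  # [PAD], which doubles as the CTC blank

# Spaces map to the "|" delimiter during CTC tokenization.
print(tok("а 0").input_ids)    # [38, 0, 4]
```

Because the total size moves from 51 to 76 ids, a CTC head trained against the previous vocabulary has the wrong output dimension and would need to be re-initialized before it can decode with this tokenizer.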