infinitejoy committed on
Commit 70ceede
1 Parent(s): 2454c51

Training in progress, step 500

added_tokens.json ADDED
@@ -0,0 +1 @@
+{"<s>": 43, "</s>": 44}
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:05e73b3cda09f1aebfc3e644f406383db24f39f8a9824d266ff7b18994d3ec7d
+oid sha256:5ed95f300a3acca2fbe544a84ace8e922c1626c0f4673b3983cc741e87eada02
 size 1262108145
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "pad_token": "[PAD]", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+{"unk_token": "[UNK]", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "[PAD]", "do_lower_case": false, "word_delimiter_token": "|", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "./wav2vec2-large-xls-r-300m-abkhaz", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ef7652ae6c77793388b9d749645dab65d9c7a7de2bd5de823ce321232ce28c7c
+oid sha256:549d99b4e3bd0d698892d554b57ba880ccbc88760013db079f9c059ea80d37b6
 size 3055
vocab.json ADDED
@@ -0,0 +1 @@
+{"а": 1, "б": 2, "в": 3, "г": 4, "д": 5, "е": 6, "ж": 7, "з": 8, "и": 9, "к": 10, "л": 11, "м": 12, "н": 13, "о": 14, "п": 15, "р": 16, "с": 17, "т": 18, "у": 19, "ф": 20, "х": 21, "ц": 22, "ч": 23, "ш": 24, "ы": 25, "ь": 26, "џ": 27, "қ": 28, "ҟ": 29, "ҩ": 30, "ҭ": 31, "ҳ": 32, "ҵ": 33, "ҷ": 34, "ҽ": 35, "ҿ": 36, "ә": 37, "ӡ": 38, "ӷ": 39, "ԥ": 40, "|": 0, "[UNK]": 41, "[PAD]": 42}