hf-transformers-bot committed on
Commit
a1a0027
·
1 Parent(s): 2000b20

Update tiny models for Wav2Vec2ForSequenceClassification

Browse files
Files changed (3) hide show
  1. config.json +2 -1
  2. pytorch_model.bin +1 -1
  3. tokenizer_config.json +2 -2
config.json CHANGED
@@ -1,5 +1,6 @@
1
  {
2
  "activation_dropout": 0.1,
 
3
  "adapter_kernel_size": 3,
4
  "adapter_stride": 2,
5
  "add_adapter": false,
@@ -79,7 +80,7 @@
79
  3
80
  ],
81
  "torch_dtype": "float32",
82
- "transformers_version": "4.25.0.dev0",
83
  "use_weighted_layer_sum": false,
84
  "vocab_size": 32,
85
  "xvector_output_dim": 32
 
1
  {
2
  "activation_dropout": 0.1,
3
+ "adapter_attn_dim": null,
4
  "adapter_kernel_size": 3,
5
  "adapter_stride": 2,
6
  "add_adapter": false,
 
80
  3
81
  ],
82
  "torch_dtype": "float32",
83
+ "transformers_version": "4.31.0.dev0",
84
  "use_weighted_layer_sum": false,
85
  "vocab_size": 32,
86
  "xvector_output_dim": 32
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:06e668c54286532f479f24aa926108936ad522be9d736c8d8293b14455b4b27c
3
  size 153130
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ef61ce2420c31e0450d0c445376bac1c4c345a3d65d46c1dc606702a292b9714
3
  size 153130
tokenizer_config.json CHANGED
@@ -1,14 +1,14 @@
1
  {
2
  "bos_token": "<s>",
 
3
  "do_lower_case": false,
4
  "do_normalize": true,
5
  "eos_token": "</s>",
6
  "model_max_length": 9223372036854775807,
7
- "name_or_path": "facebook/wav2vec2-base-960h",
8
  "pad_token": "<pad>",
9
  "replace_word_delimiter_char": " ",
10
  "return_attention_mask": false,
11
- "special_tokens_map_file": "/home/huggingface/.cache/huggingface/hub/models--facebook--wav2vec2-base-960h/snapshots/22aad52d435eb6dbaf354bdad9b0da84ce7d6156/special_tokens_map.json",
12
  "tokenizer_class": "Wav2Vec2CTCTokenizer",
13
  "unk_token": "<unk>",
14
  "word_delimiter_token": "|"
 
1
  {
2
  "bos_token": "<s>",
3
+ "clean_up_tokenization_spaces": true,
4
  "do_lower_case": false,
5
  "do_normalize": true,
6
  "eos_token": "</s>",
7
  "model_max_length": 9223372036854775807,
 
8
  "pad_token": "<pad>",
9
  "replace_word_delimiter_char": " ",
10
  "return_attention_mask": false,
11
+ "target_lang": null,
12
  "tokenizer_class": "Wav2Vec2CTCTokenizer",
13
  "unk_token": "<unk>",
14
  "word_delimiter_token": "|"