boumehdi committed
Commit 711bff1
1 Parent(s): bd04eb6

Upload 9 files

config.json CHANGED
@@ -1,6 +1,6 @@
 {
-  "_name_or_path": "boumehdi/wav2vec2-large-xlsr-moroccan-darija",
-  "activation_dropout": 0.1,
+  "_name_or_path": "C:\\workspace\\checkpoints\\",
+  "activation_dropout": 0.0,
   "adapter_kernel_size": 3,
   "adapter_stride": 2,
   "add_adapter": false,
@@ -11,7 +11,7 @@
   "attention_dropout": 0.0,
   "bos_token_id": 1,
   "classifier_proj_size": 256,
-  "codevector_dim": 256,
+  "codevector_dim": 768,
   "contrastive_logits_temperature": 0.1,
   "conv_bias": true,
   "conv_dim": [
@@ -42,7 +42,7 @@
     2
   ],
   "ctc_loss_reduction": "mean",
-  "ctc_zero_infinity": true,
+  "ctc_zero_infinity": false,
   "diversity_loss_weight": 0.1,
   "do_stable_layer_norm": true,
   "eos_token_id": 2,
@@ -52,6 +52,7 @@
   "feat_proj_dropout": 0.0,
   "feat_quantizer_dropout": 0.0,
   "final_dropout": 0.0,
+  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout": 0.0,
   "hidden_size": 1024,
@@ -85,7 +86,7 @@
   "num_negatives": 100,
   "output_hidden_size": 1024,
   "pad_token_id": 64,
-  "proj_codevector_dim": 256,
+  "proj_codevector_dim": 768,
   "tdnn_dilation": [
     1,
     2,
@@ -108,7 +109,7 @@
     1
   ],
   "torch_dtype": "float32",
-  "transformers_version": "4.17.0",
+  "transformers_version": "4.21.1",
   "use_weighted_layer_sum": false,
   "vocab_size": 65,
   "xvector_output_dim": 512
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:948af3977597afde841a34f4665448ffeb3ff17828e75c97eeabe86b3b53e081
+oid sha256:020736b362ccd4994ec153c58e33efb70e2983f86913170c881f326032927472
 size 2490594117
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a8598096bca12fa19715543c9b7b7f0c6dde5128575a3e1b065adbf1671734e5
-size 1262195949
+oid sha256:764829c2d5a7496eebf8c02d4f9a9985b498933b88d98cd02a1032620ab26555
+size 1262168365
rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:57eca636cd11203a89a00153889e3d03cff43f61f0afc69caeed77f41d6b46b6
-size 14639
+oid sha256:b6cefc11e17e257638ad9c5b3eb6ec764c29faca94325cfa58d31d81e813b6bc
+size 14575
scaler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a4f7023595742b93a34bcd892893900f00f4a4b2973dfb9b88f56fcbf5c8114c
+oid sha256:a35f841e23df4770d6b9c48eff56102a1275a3011f375e0e74a40749956bbd2c
 size 557
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c2823acf781884f62389470c36145aec62e2ec5ad1b7cc27f7076d68d8a206b1
+oid sha256:2a2d07290d77bd12c9ef7653c100cf6f84d10d4718cfca1f85f07b80a0afb870
 size 627
trainer_state.json CHANGED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0f1039d08bad8d25a75718b4db53ef6daa3948628daccd58d428c725c6e26415
-size 3003
+oid sha256:2dc4ca5222d0dbb8c6bbb35bc4b51855f9b589131a7efbbe23578dfa6235ed27
+size 3323
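
Each binary file above is stored through Git LFS, so the diff only shows the pointer file: a spec version line, the sha256 oid of the actual payload, and its size in bytes. A downloaded copy can be checked against the pointer with a short standard-library script; this is a generic sketch, not part of the repo, and the file path and example values are taken from the training_args.bin pointer in this commit purely for illustration.

# Generic sketch: verify a downloaded file against its Git LFS pointer
# ("oid sha256:..." and "size ..." as shown in the diffs above).
import hashlib
import os

def verify_lfs_object(file_path: str, expected_oid: str, expected_size: int) -> bool:
    """Return True if the file's byte size and sha256 digest match the pointer."""
    if os.path.getsize(file_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# Example: the new training_args.bin pointer from this commit
ok = verify_lfs_object(
    "training_args.bin",
    "2dc4ca5222d0dbb8c6bbb35bc4b51855f9b589131a7efbbe23578dfa6235ed27",
    3323,
)
print("match" if ok else "mismatch")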