datdq committed on
Commit c1bfb0c
1 Parent(s): 31c1b3e

Training in progress, step 25

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "openai/whisper-small",
+  "_name_or_path": "vinai/PhoWhisper-small",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": false,
@@ -25,20 +25,7 @@
   "encoder_layerdrop": 0.0,
   "encoder_layers": 12,
   "eos_token_id": 50257,
-  "forced_decoder_ids": [
-    [
-      1,
-      50259
-    ],
-    [
-      2,
-      50359
-    ],
-    [
-      3,
-      50363
-    ]
-  ],
+  "forced_decoder_ids": null,
   "init_std": 0.02,
   "is_encoder_decoder": true,
   "mask_feature_length": 10,
@@ -50,99 +37,12 @@
   "max_length": 448,
   "max_source_positions": 1500,
   "max_target_positions": 448,
+  "median_filter_width": 7,
   "model_type": "whisper",
   "num_hidden_layers": 12,
   "num_mel_bins": 80,
   "pad_token_id": 50257,
   "scale_embedding": false,
-  "suppress_tokens": [
-    1,
-    2,
-    7,
-    8,
-    9,
-    10,
-    14,
-    25,
-    26,
-    27,
-    28,
-    29,
-    31,
-    58,
-    59,
-    60,
-    61,
-    62,
-    63,
-    90,
-    91,
-    92,
-    93,
-    359,
-    503,
-    522,
-    542,
-    873,
-    893,
-    902,
-    918,
-    922,
-    931,
-    1350,
-    1853,
-    1982,
-    2460,
-    2627,
-    3246,
-    3253,
-    3268,
-    3536,
-    3846,
-    3961,
-    4183,
-    4667,
-    6585,
-    6647,
-    7273,
-    9061,
-    9383,
-    10428,
-    10929,
-    11938,
-    12033,
-    12331,
-    12562,
-    13793,
-    14157,
-    14635,
-    15265,
-    15618,
-    16553,
-    16604,
-    18362,
-    18956,
-    20075,
-    21675,
-    22520,
-    26130,
-    26161,
-    26435,
-    28279,
-    29464,
-    31650,
-    32302,
-    32470,
-    36865,
-    42863,
-    47425,
-    49870,
-    50254,
-    50258,
-    50360,
-    50361,
-    50362
-  ],
   "torch_dtype": "float32",
   "transformers_version": "4.27.4",
   "use_cache": true,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9f806b015c63d488a59761bf4abbdc4a58927c9ad7547d31105dbddde3257441
+oid sha256:722116668829ef5ae746f74a5408a227fd91e7fec31998ab1d388c9f047d4177
 size 967103046
runs/May22_14-08-18_ubuntu-vtp/events.out.tfevents.1716361702.ubuntu-vtp.45695.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:24b59ba6bd583bf013f82051ead0b5f61d1b7552502495819e7a07607f70968f
-size 6178
+oid sha256:1eabd317477cfbfe1436396e52dd73a71d71de13b854a9c6373449e808746145
+size 6332
runs/May22_14-30-39_ubuntu-vtp/1716363044.0573888/events.out.tfevents.1716363044.ubuntu-vtp.50626.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ed640418f8f86ab42055d4772b13b991b9a2eace2e273a8b91ccd9685d3fc88
+size 6101
runs/May22_14-30-39_ubuntu-vtp/events.out.tfevents.1716363044.ubuntu-vtp.50626.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16bdd3e87648282a065ca960e658437ca76a29e3f888faac601043309479b0bd
+size 4781
runs/May22_14-33-54_ubuntu-vtp/1716363238.6607683/events.out.tfevents.1716363238.ubuntu-vtp.51220.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f32670df495dcebc4f1e2f4938f10fd3e4a9fe1b423881f78c5f05f1baf05d3f
+size 6101
runs/May22_14-33-54_ubuntu-vtp/events.out.tfevents.1716363238.ubuntu-vtp.51220.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1a9b42307dc62a8a0afbf8ed82156ddff3229f8598b673d0b30d08b5b15a6eef
+size 4781
runs/May22_14-37-27_ubuntu-vtp/1716363450.9310381/events.out.tfevents.1716363450.ubuntu-vtp.52180.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd2d9b807563f953b918753d5df2285b7cd7c964ecd1535e3206f42cfaef43d9
+size 6101
runs/May22_14-37-27_ubuntu-vtp/events.out.tfevents.1716363450.ubuntu-vtp.52180.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12d538450ddea83fb87fee54d780f6b70cf5d562deb23ff232cb1597635ab361
+size 4781
runs/May22_14-38-20_ubuntu-vtp/1716363505.065029/events.out.tfevents.1716363505.ubuntu-vtp.52436.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b739618a98f7367a64f926d602c35fceae16c3e80adbe4306c3de6df5acc5a1
+size 6101
runs/May22_14-38-20_ubuntu-vtp/events.out.tfevents.1716363505.ubuntu-vtp.52436.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ec354052dd06ac1cc13a37648422933892fe04463e75e4fcd4c5bdf406ab051
+size 4781
runs/May22_14-43-58_ubuntu-vtp/1716363841.8733983/events.out.tfevents.1716363841.ubuntu-vtp.53000.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72b2c6d5bf73e55eba0f998532c7829aa2635d042056a0bdcd54c05423fc6525
+size 6101
runs/May22_14-43-58_ubuntu-vtp/events.out.tfevents.1716363841.ubuntu-vtp.53000.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8aa616f25720a38590bee5fdd2c9c157ce4fa7c8d78fb739011ecfe7664df21
+size 5245
tokenizer_config.json CHANGED
@@ -23,7 +23,7 @@
   "pad_token": null,
   "processor_class": "WhisperProcessor",
   "return_attention_mask": false,
-  "special_tokens_map_file": "/home/datdq1/.cache/huggingface/hub/models--vinai--PhoWhisper-medium/snapshots/1e97beae9b5dde44eb29c84a3cd2d39f3bc03d8e/special_tokens_map.json",
+  "special_tokens_map_file": "/home/datdq1/.cache/huggingface/hub/models--vinai--PhoWhisper-small/snapshots/d44d00752724b8b28c5b66517b4720b73062a26c/special_tokens_map.json",
   "tokenizer_class": "WhisperTokenizer",
   "unk_token": {
     "__type": "AddedToken",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0bf3824c78ff69e63cfd28ed25af094b551e9befc036d7ecc1b9431cb2e5ac8f
+oid sha256:7668b3f0de1ac2122fdbe7c4186a27f9c18c58d420595c44b487dc844ef49947
 size 4152