shljessie committed on
Commit
fbe95ed
1 Parent(s): 850f4a2

Training in progress, step 10

Browse files
config.json CHANGED
@@ -25,12 +25,7 @@
25
  "encoder_layerdrop": 0.0,
26
  "encoder_layers": 4,
27
  "eos_token_id": 50256,
28
- "forced_decoder_ids": [
29
- [
30
- 1,
31
- 50362
32
- ]
33
- ],
34
  "init_std": 0.02,
35
  "is_encoder_decoder": true,
36
  "mask_feature_length": 10,
@@ -48,101 +43,10 @@
48
  "num_mel_bins": 80,
49
  "pad_token_id": 50256,
50
  "scale_embedding": false,
51
- "suppress_tokens": [
52
- 1,
53
- 2,
54
- 7,
55
- 8,
56
- 9,
57
- 10,
58
- 14,
59
- 25,
60
- 26,
61
- 27,
62
- 28,
63
- 29,
64
- 31,
65
- 58,
66
- 59,
67
- 60,
68
- 61,
69
- 62,
70
- 63,
71
- 90,
72
- 91,
73
- 92,
74
- 93,
75
- 357,
76
- 366,
77
- 438,
78
- 532,
79
- 685,
80
- 705,
81
- 796,
82
- 930,
83
- 1058,
84
- 1220,
85
- 1267,
86
- 1279,
87
- 1303,
88
- 1343,
89
- 1377,
90
- 1391,
91
- 1635,
92
- 1782,
93
- 1875,
94
- 2162,
95
- 2361,
96
- 2488,
97
- 3467,
98
- 4008,
99
- 4211,
100
- 4600,
101
- 4808,
102
- 5299,
103
- 5855,
104
- 6329,
105
- 7203,
106
- 9609,
107
- 9959,
108
- 10563,
109
- 10786,
110
- 11420,
111
- 11709,
112
- 11907,
113
- 13163,
114
- 13697,
115
- 13700,
116
- 14808,
117
- 15306,
118
- 16410,
119
- 16791,
120
- 17992,
121
- 19203,
122
- 19510,
123
- 20724,
124
- 22305,
125
- 22935,
126
- 27007,
127
- 30109,
128
- 30420,
129
- 33409,
130
- 34949,
131
- 40283,
132
- 40493,
133
- 40549,
134
- 47282,
135
- 49146,
136
- 50257,
137
- 50357,
138
- 50358,
139
- 50359,
140
- 50360,
141
- 50361
142
- ],
143
  "torch_dtype": "float32",
144
  "transformers_version": "4.40.1",
145
- "use_cache": true,
146
  "use_weighted_layer_sum": false,
147
  "vocab_size": 51864
148
  }
 
25
  "encoder_layerdrop": 0.0,
26
  "encoder_layers": 4,
27
  "eos_token_id": 50256,
28
+ "forced_decoder_ids": null,
 
 
 
 
 
29
  "init_std": 0.02,
30
  "is_encoder_decoder": true,
31
  "mask_feature_length": 10,
 
43
  "num_mel_bins": 80,
44
  "pad_token_id": 50256,
45
  "scale_embedding": false,
46
+ "suppress_tokens": [],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
  "torch_dtype": "float32",
48
  "transformers_version": "4.40.1",
49
+ "use_cache": false,
50
  "use_weighted_layer_sum": false,
51
  "vocab_size": 51864
52
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2a98c0309ce481bf48fcb369d90bb851dc8629ae865719a5e07b80264c32704c
3
  size 151060136
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:41ead89707810f8871cedaef20e582ec89bb5da6e51d134226ec521cc24d6786
3
  size 151060136
runs/Apr30_17-58-31_5935be937a15/events.out.tfevents.1714499999.5935be937a15.347.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7a1609af13abdb11a678528180f6fddfe5758507f08cae5c972cc19fba673ef9
3
+ size 6287
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:96b68f22912e1a1abdaccc05cbd0cfdc67d7f925507cda2e400df026de976c82
3
  size 5176
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9dab1121b3243b8f1a3fb9c56bcd359bb8ab8d56d11887da0cc02c1e5abe61e5
3
  size 5176