ylacombe (HF staff) committed on
Commit fada991 (1 parent: da4c346)
Files changed (3)
  1. config.json +145 -0
  2. generation_config.json +290 -0
  3. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,145 @@
+ {
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
+ "adaptor_dropout": 0.1,
+ "adaptor_kernel_size": 8,
+ "adaptor_layer_norm": true,
+ "adaptor_stride": 8,
+ "add_adapter": true,
+ "architectures": [
+ "SeamlessM4TModel"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 2,
+ "conv_bias": false,
+ "conv_depthwise_kernel_size": 31,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 160
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 4096,
+ "decoder_layerdrop": 0.05,
+ "decoder_layers": 12,
+ "decoder_start_token_id": 3,
+ "dropout": 0.1,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 4096,
+ "encoder_layerdrop": 0.05,
+ "encoder_layers": 12,
+ "eos_token_id": 3,
+ "hidden_act": "gelu",
+ "hidden_size": 1024,
+ "init_std": 0.02,
+ "initializer_range": 0.02,
+ "is_encoder_decoder": true,
+ "lang_embed_dim": 256,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.1,
+ "leaky_relu_slope": 0.1,
+ "max_new_tokens": 256,
+ "max_position_embeddings": 4096,
+ "max_source_positions": 4096,
+ "model_in_dim": 1792,
+ "model_type": "seamless_m4t",
+ "num_adapter_layers": 1,
+ "num_attention_heads": 16,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_hidden_layers": 12,
+ "output_hidden_size": null,
+ "pad_token_id": 0,
+ "position_embeddings_type": "relative",
+ "resblock_dilation_sizes": [
+ [
+ 1,
+ 3,
+ 5
+ ],
+ [
+ 1,
+ 3,
+ 5
+ ],
+ [
+ 1,
+ 3,
+ 5
+ ]
+ ],
+ "resblock_kernel_sizes": [
+ 3,
+ 7,
+ 11
+ ],
+ "rotary_embedding_base": 10000,
+ "sampling_rate": 16000,
+ "scale_embedding": true,
+ "speech_encoder_attention_heads": 16,
+ "speech_encoder_dropout": 0.0,
+ "speech_encoder_hidden_act": "swish",
+ "speech_encoder_intermediate_size": 4096,
+ "speech_encoder_layers": 12,
+ "spkr_embed_dim": 256,
+ "t2u_bos_token_id": 0,
+ "t2u_decoder_attention_heads": 16,
+ "t2u_decoder_ffn_dim": 8192,
+ "t2u_decoder_layers": 4,
+ "t2u_decoder_start_token_id": 2,
+ "t2u_encoder_attention_heads": 16,
+ "t2u_encoder_ffn_dim": 8192,
+ "t2u_encoder_layers": 4,
+ "t2u_eos_token_id": 2,
+ "t2u_max_new_tokens": 1024,
+ "t2u_num_langs": 38,
+ "t2u_pad_token_id": 1,
+ "torch_dtype": "float32",
+ "transformers_version": "4.33.0.dev0",
+ "unit_embed_dim": 1280,
+ "unit_hifi_gan_vocab_size": 10000,
+ "unit_vocab_size": 10082,
+ "upsample_initial_channel": 512,
+ "upsample_kernel_sizes": [
+ 11,
+ 8,
+ 8,
+ 4,
+ 4
+ ],
+ "upsample_rates": [
+ 5,
+ 4,
+ 4,
+ 2,
+ 2
+ ],
+ "use_cache": true,
+ "var_pred_dropout": 0.5,
+ "variance_predictor_kernel_size": 3,
+ "vocab_size": 256206,
+ "vocoder_num_langs": 36,
+ "vocoder_num_spkrs": 200
+ }
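The config above specifies the full SeamlessM4T stack (text encoder/decoder, speech encoder with adapter, T2U model, and HiFi-GAN unit vocoder). A minimal sketch of inspecting it with Transformers, assuming config.json has been downloaded to the current directory (the target Hub repo id is not named in this diff) and a Transformers release that ships SeamlessM4T support:

```python
from transformers import SeamlessM4TConfig

# Load the config.json added in this commit from a local directory;
# substitute the Hub repo id once the checkpoint is published (assumption).
config = SeamlessM4TConfig.from_pretrained(".")

# A few of the values defined above:
print(config.model_type)             # seamless_m4t
print(config.hidden_size)            # 1024
print(config.vocab_size)             # 256206 (text vocabulary)
print(config.unit_vocab_size)        # 10082 (speech-unit vocabulary)
print(config.speech_encoder_layers)  # 12
```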
generation_config.json ADDED
@@ -0,0 +1,290 @@
+ {
+ "bos_token_id": 2,
+ "decoder_start_token_id": 3,
+ "eos_token_id": 3,
+ "max_new_tokens": 256,
+ "pad_token_id": 0,
+ "t2u_lang_code_to_id": {
+ "arb": 0,
+ "ben": 1,
+ "cat": 2,
+ "ces": 3,
+ "cmn": 4,
+ "cym": 5,
+ "dan": 6,
+ "deu": 7,
+ "eng": 8,
+ "est": 9,
+ "fin": 10,
+ "fra": 11,
+ "hin": 12,
+ "ind": 13,
+ "ita": 14,
+ "jpn": 15,
+ "kan": 16,
+ "kor": 17,
+ "mlt": 18,
+ "nld": 19,
+ "pes": 20,
+ "pol": 21,
+ "por": 22,
+ "ron": 23,
+ "rus": 24,
+ "slk": 25,
+ "spa": 26,
+ "swe": 27,
+ "swh": 28,
+ "tam": 29,
+ "tel": 30,
+ "tgl": 31,
+ "tha": 32,
+ "tur": 33,
+ "ukr": 34,
+ "urd": 35,
+ "uzn": 36,
+ "vie": 37
+ },
+ "text_decoder_lang_to_code_id": {
+ "ace": 256001,
+ "ace_Latn": 256002,
+ "acm": 256003,
+ "acq": 256004,
+ "aeb": 256005,
+ "afr": 256006,
+ "ajp": 256007,
+ "aka": 256008,
+ "als": 256162,
+ "amh": 256009,
+ "apc": 256010,
+ "arb": 256011,
+ "ars": 256012,
+ "ary": 256013,
+ "arz": 256014,
+ "asm": 256015,
+ "ast": 256016,
+ "awa": 256017,
+ "ayr": 256018,
+ "azb": 256019,
+ "azj": 256020,
+ "bak": 256021,
+ "bam": 256022,
+ "ban": 256023,
+ "bel": 256024,
+ "bem": 256025,
+ "ben": 256026,
+ "bho": 256027,
+ "bjn": 256028,
+ "bjn_Latn": 256029,
+ "bod": 256030,
+ "bos": 256031,
+ "bug": 256032,
+ "bul": 256033,
+ "cat": 256034,
+ "ceb": 256035,
+ "ces": 256036,
+ "cjk": 256037,
+ "ckb": 256038,
+ "cmn": 256200,
+ "cmn_Hant": 256201,
+ "crh": 256039,
+ "cym": 256040,
+ "dan": 256041,
+ "deu": 256042,
+ "dik": 256043,
+ "dyu": 256044,
+ "dzo": 256045,
+ "ell": 256046,
+ "eng": 256047,
+ "epo": 256048,
+ "est": 256049,
+ "eus": 256050,
+ "ewe": 256051,
+ "fao": 256052,
+ "fij": 256054,
+ "fin": 256055,
+ "fon": 256056,
+ "fra": 256057,
+ "fur": 256058,
+ "fuv": 256059,
+ "gaz": 256135,
+ "gla": 256060,
+ "gle": 256061,
+ "glg": 256062,
+ "grn": 256063,
+ "guj": 256064,
+ "hat": 256065,
+ "hau": 256066,
+ "heb": 256067,
+ "hin": 256068,
+ "hne": 256069,
+ "hrv": 256070,
+ "hun": 256071,
+ "hye": 256072,
+ "ibo": 256073,
+ "ilo": 256074,
+ "ind": 256075,
+ "isl": 256076,
+ "ita": 256077,
+ "jav": 256078,
+ "jpn": 256079,
+ "kab": 256080,
+ "kac": 256081,
+ "kam": 256082,
+ "kan": 256083,
+ "kas": 256084,
+ "kas_Deva": 256085,
+ "kat": 256086,
+ "kaz": 256089,
+ "kbp": 256090,
+ "kea": 256091,
+ "khk": 256122,
+ "khm": 256092,
+ "kik": 256093,
+ "kin": 256094,
+ "kir": 256095,
+ "kmb": 256096,
+ "kmr": 256099,
+ "knc": 256087,
+ "knc_Latn": 256088,
+ "kon": 256097,
+ "kor": 256098,
+ "lao": 256100,
+ "lij": 256102,
+ "lim": 256103,
+ "lin": 256104,
+ "lit": 256105,
+ "lmo": 256106,
+ "ltg": 256107,
+ "ltz": 256108,
+ "lua": 256109,
+ "lug": 256110,
+ "luo": 256111,
+ "lus": 256112,
+ "lvs": 256101,
+ "mag": 256113,
+ "mai": 256114,
+ "mal": 256115,
+ "mar": 256116,
+ "min": 256117,
+ "mkd": 256118,
+ "mlt": 256120,
+ "mni": 256121,
+ "mos": 256123,
+ "mri": 256124,
+ "mya": 256126,
+ "nld": 256127,
+ "nno": 256128,
+ "nob": 256129,
+ "npi": 256130,
+ "nso": 256131,
+ "nus": 256132,
+ "nya": 256133,
+ "oci": 256134,
+ "ory": 256136,
+ "pag": 256137,
+ "pan": 256138,
+ "pap": 256139,
+ "pbt": 256143,
+ "pes": 256053,
+ "plt": 256119,
+ "pol": 256140,
+ "por": 256141,
+ "prs": 256142,
+ "quy": 256144,
+ "ron": 256145,
+ "run": 256146,
+ "rus": 256147,
+ "sag": 256148,
+ "san": 256149,
+ "sat": 256150,
+ "scn": 256151,
+ "shn": 256152,
+ "sin": 256153,
+ "slk": 256154,
+ "slv": 256155,
+ "smo": 256156,
+ "sna": 256157,
+ "snd": 256158,
+ "som": 256159,
+ "sot": 256160,
+ "spa": 256161,
+ "srd": 256163,
+ "srp": 256164,
+ "ssw": 256165,
+ "sun": 256166,
+ "swe": 256167,
+ "swh": 256168,
+ "szl": 256169,
+ "tam": 256170,
+ "taq": 256177,
+ "taq_Tfng": 256178,
+ "tat": 256171,
+ "tel": 256172,
+ "tgk": 256173,
+ "tgl": 256174,
+ "tha": 256175,
+ "tir": 256176,
+ "tpi": 256179,
+ "tsn": 256180,
+ "tso": 256181,
+ "tuk": 256182,
+ "tum": 256183,
+ "tur": 256184,
+ "twi": 256185,
+ "tzm": 256186,
+ "uig": 256187,
+ "ukr": 256188,
+ "umb": 256189,
+ "urd": 256190,
+ "uzn": 256191,
+ "vec": 256192,
+ "vie": 256193,
+ "war": 256194,
+ "wol": 256195,
+ "xho": 256196,
+ "ydd": 256197,
+ "yor": 256198,
+ "yue": 256199,
+ "zsm": 256125,
+ "zul": 256202
+ },
+ "transformers_version": "4.33.0.dev0",
+ "vocoder_lang_code_to_id": {
+ "arb": 0,
+ "ben": 1,
+ "cat": 2,
+ "ces": 3,
+ "cmn": 4,
+ "cym": 5,
+ "dan": 6,
+ "deu": 7,
+ "eng": 8,
+ "est": 9,
+ "fin": 10,
+ "fra": 11,
+ "hin": 12,
+ "ind": 13,
+ "ita": 14,
+ "jpn": 15,
+ "kor": 16,
+ "mlt": 17,
+ "nld": 18,
+ "pes": 19,
+ "pol": 20,
+ "por": 21,
+ "ron": 22,
+ "rus": 23,
+ "slk": 24,
+ "spa": 25,
+ "swe": 26,
+ "swh": 27,
+ "tel": 28,
+ "tgl": 29,
+ "tha": 30,
+ "tur": 31,
+ "ukr": 32,
+ "urd": 33,
+ "uzn": 34,
+ "vie": 35
+ }
+ }
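generation_config.json carries the special-token ids and the three language-code maps consumed at generation time: text_decoder_lang_to_code_id selects the target-language token for the text decoder, while t2u_lang_code_to_id and vocoder_lang_code_to_id restrict which target languages can also be rendered as speech. A minimal usage sketch; the repo id "facebook/hf-seamless-m4t-medium" is an assumption (this diff does not name the repository), and the API calls follow the documented SeamlessM4TModel interface:

```python
from transformers import AutoProcessor, SeamlessM4TModel

repo_id = "facebook/hf-seamless-m4t-medium"  # assumed; substitute the actual repo id
processor = AutoProcessor.from_pretrained(repo_id)
model = SeamlessM4TModel.from_pretrained(repo_id)

inputs = processor(text="Hello, how are you?", src_lang="eng", return_tensors="pt")

# "fra" appears in all three maps above, so both speech and text output are valid;
# a code listed only in text_decoder_lang_to_code_id supports text output only.
waveform = model.generate(**inputs, tgt_lang="fra")[0]  # 16 kHz audio (see sampling_rate)

text_out = model.generate(**inputs, tgt_lang="fra", generate_speech=False)
print(processor.decode(text_out[0].tolist()[0], skip_special_tokens=True))
```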
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:395d43577e8eef756f27da4912d578b10f23a93a79184893cfb534e392ec7ad2
+ size 4838401068
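pytorch_model.bin is tracked with Git LFS, so the three lines above are only the pointer (spec version, sha256 oid, and byte size), not the ~4.8 GB weight file itself. A small sketch for verifying a downloaded copy against the pointer, using only the values recorded above:

```python
import hashlib

# sha256 oid and byte size copied from the LFS pointer in this commit.
EXPECTED_OID = "395d43577e8eef756f27da4912d578b10f23a93a79184893cfb534e392ec7ad2"
EXPECTED_SIZE = 4838401068

sha = hashlib.sha256()
size = 0
with open("pytorch_model.bin", "rb") as f:          # path assumes a local clone with LFS files pulled
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"unexpected size: {size}"
assert sha.hexdigest() == EXPECTED_OID, "sha256 mismatch with the LFS pointer"
print("pytorch_model.bin matches the LFS pointer")
```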