anton-l (HF staff): Update hidden_act and layer_norm_eps (commit 71900e6)
{
"activation_dropout": 0.1,
"apply_spec_augment": true,
"architectures": [
"SEWDForCTC"
],
"attention_dropout": 0.0,
"bos_token_id": 1,
"classifier_proj_size": 256,
"conv_bias": false,
"conv_dim": [
64,
128,
128,
128,
128,
256,
256,
256,
256,
512,
512,
512,
512
],
"conv_kernel": [
10,
3,
1,
3,
1,
3,
1,
3,
1,
2,
1,
2,
1
],
"conv_stride": [
5,
2,
1,
2,
1,
2,
1,
2,
1,
2,
1,
2,
1
],
"ctc_loss_reduction": "mean",
"ctc_zero_infinity": false,
"eos_token_id": 2,
"feat_extract_activation": "gelu",
"feat_extract_norm": "group",
"feat_proj_dropout": 0.0,
"feature_extractor_type": "Wav2Vec2FeatureExtractor",
"final_dropout": 0.0,
"hidden_act": "gelu_python",
"hidden_dropout": 0.0,
"hidden_size": 512,
"initializer_range": 0.02,
"intermediate_size": 2048,
"layer_norm_eps": 1e-07,
"layerdrop": 0.1,
"mask_feature_length": 64,
"mask_feature_prob": 0.5,
"mask_time_length": 10,
"mask_time_prob": 0.65,
"max_position_embeddings": 512,
"model_type": "sew-d",
"norm_rel_ebd": "layer_norm",
"num_attention_heads": 8,
"num_conv_pos_embedding_groups": 16,
"num_conv_pos_embeddings": 31,
"num_feat_extract_layers": 13,
"num_hidden_layers": 24,
"pad_token_id": 0,
"pos_att_type": [
"p2c",
"c2p"
],
"position_biased_input": false,
"position_buckets": 256,
"relative_attention": true,
"share_att_key": true,
"squeeze_factor": 2,
"tokenizer_class": "Wav2Vec2CTCTokenizer",
"torch_dtype": "float32",
"transformers_version": "4.12.0.dev0",
"use_weighted_layer_sum": false,
"vocab_size": 32,
"feature_layer_norm_eps": 1e-05
}
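
For reference, a config like the one above can be instantiated directly with transformers, which ships the SEWDConfig and SEWDForCTC classes the file names. A minimal sketch, assuming the JSON is saved locally as config.json:

```python
from transformers import SEWDConfig, SEWDForCTC

# Parse the JSON above into a SEWDConfig (assumes the file sits at ./config.json).
config = SEWDConfig.from_json_file("config.json")

# Build the CTC model listed under "architectures". Only the config is loaded,
# so the weights here are randomly initialized, not a trained checkpoint.
model = SEWDForCTC(config)
print(config.hidden_size)  # 512
```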
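
As a quick sanity check on the feature extractor settings, the product of the 13 conv_stride values gives the total downsampling factor: 5 * 2**6 = 320 input samples per output frame, i.e. 20 ms per frame for 16 kHz audio (the sampling rate is an assumption based on the usual Wav2Vec2FeatureExtractor setup, not stated in this file); the encoder then halves the sequence again internally via squeeze_factor = 2 before upsampling at the output.

```python
import math

conv_stride = [5, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1]  # copied from the config
downsample = math.prod(conv_stride)

print(downsample)                  # 320 samples per encoder frame
print(1000 * downsample / 16_000)  # 20.0 ms per frame, assuming 16 kHz input
```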