{
"_name_or_path": "facebook/w2v-bert-2.0",
"activation_dropout": 0.006,
"adapter_act": "relu",
"adapter_kernel_size": 3,
"adapter_stride": 2,
"add_adapter": false,
"apply_spec_augment": false,
"architectures": [
"Wav2Vec2BertForSequenceClassification"
],
"attention_dropout": 0.0094,
"bos_token_id": 1,
"classifier_proj_size": 768,
"codevector_dim": 768,
"conformer_conv_dropout": 0.1,
"contrastive_logits_temperature": 0.1,
"conv_depthwise_kernel_size": 31,
"ctc_loss_reduction": "sum",
"ctc_zero_infinity": false,
"diversity_loss_weight": 0.1,
"eos_token_id": 2,
"feat_proj_dropout": 0.0,
"feat_quantizer_dropout": 0.0,
"feature_projection_input_dim": 160,
"final_dropout": 0.0005,
"hidden_act": "swish",
"hidden_dropout": 0.004,
"hidden_size": 1024,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1",
"2": "LABEL_2",
"3": "LABEL_3"
},
"initializer_range": 0.02,
"intermediate_size": 4096,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1,
"LABEL_2": 2,
"LABEL_3": 3
},
"layer_norm_eps": 1e-05,
"layerdrop": 0.0005,
"left_max_position_embeddings": 64,
"mask_feature_length": 5,
"mask_feature_min_masks": 2,
"mask_feature_prob": 0.0075,
"mask_time_length": 5,
"mask_time_min_masks": 2,
"mask_time_prob": 0.0085,
"max_source_positions": 5000,
"model_type": "wav2vec2-bert",
"num_adapter_layers": 1,
"num_attention_heads": 16,
"num_codevector_groups": 2,
"num_codevectors_per_group": 320,
"num_hidden_layers": 24,
"num_negatives": 100,
"output_hidden_size": 1024,
"pad_token_id": 0,
"position_embeddings_type": "relative_key",
"proj_codevector_dim": 768,
"right_max_position_embeddings": 8,
"rotary_embedding_base": 10000,
"tdnn_dilation": [
1,
2,
3,
1,
1
],
"tdnn_dim": [
512,
512,
512,
512,
1500
],
"tdnn_kernel": [
5,
3,
3,
1,
1
],
"torch_dtype": "float32",
"transformers_version": "4.37.0",
"use_intermediate_ffn_before_adapter": false,
"use_weighted_layer_sum": false,
"vocab_size": null,
"xvector_output_dim": 512
}