name: bert_sequence_labeling
config_type: model
task: sequence_labeling
num_labels: 13
id2label:
  0: O
  1: B-pro
  2: I-pro
  3: B-pers
  4: I-pers
  5: B-org
  6: I-org
  7: B-loc
  8: I-loc
  9: B-fac
  10: I-fac
  11: B-event
  12: I-event
vocab_size: 42000
hidden_size: 768
num_hidden_layers: 12
num_attention_heads: 12
intermediate_size: 3072
hidden_act: gelu
hidden_dropout_prob: 0.1
attention_probs_dropout_prob: 0.1
max_position_embeddings: 512
type_vocab_size: 2
initializer_range: 0.02
layer_norm_eps: 1.0e-12
pad_token_id: 0
position_embedding_type: absolute
use_cache: true
prediction_skip_tokens:
  - '[SEP]'
  - '[CLS]'
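
# --- Usage sketch (not part of the original config) ---
# The fields above describe a BERT-style token-classification setup: 13 IOB2
# labels (O plus B-/I- pairs for six entity types, which appear to be product,
# person, organization, location, facility, and event). A minimal, hypothetical
# Python sketch for reading this file and decoding predicted label ids,
# assuming it is saved as `model_config.yaml` (the filename is an assumption):
#
#   import yaml
#
#   with open("model_config.yaml") as f:
#       cfg = yaml.safe_load(f)
#
#   # YAML already parses the mapping keys as ints; int() is a no-op safeguard.
#   id2label = {int(i): tag for i, tag in cfg["id2label"].items()}
#   assert len(id2label) == cfg["num_labels"]  # 13 = O + (B/I) * 6 entity types
#
#   # Map a sequence of predicted class ids to IOB2 tags.
#   preds = [0, 3, 4, 0, 7]
#   tags = [id2label[i] for i in preds]  # ['O', 'B-pers', 'I-pers', 'O', 'B-loc']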