name: bert_sequence_labeling
config_type: model
task: sequence_labeling
num_labels: 23
id2label:
  0: P
  1: Ne
  2: PRO
  3: CONJ
  4: 'N'
  5: PUNC
  6: Pe
  7: ADV
  8: V
  9: AJ
  10: AJe
  11: DET
  12: POSTP
  13: NUM
  14: DETe
  15: NUMe
  16: PROe
  17: ADVe
  18: RES
  19: CL
  20: INT
  21: CONJe
  22: RESe
vocab_size: 42000
hidden_size: 768
num_hidden_layers: 12
num_attention_heads: 12
intermediate_size: 3072
hidden_act: gelu
hidden_dropout_prob: 0.1
attention_probs_dropout_prob: 0.1
max_position_embeddings: 512
type_vocab_size: 2
initializer_range: 0.02
layer_norm_eps: 1.0e-12
pad_token_id: 0
position_embedding_type: absolute
use_cache: true
prediction_skip_tokens:
  - '[SEP]'
  - '[CLS]'
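
# ---------------------------------------------------------------------------
# Usage sketch (an assumption, not part of the config): the architecture
# fields above mirror the standard Hugging Face `BertConfig` arguments, so
# one plausible way to consume this file is to load it and build a
# `BertForTokenClassification` head with `num_labels` outputs. The filename
# `bert_sequence_labeling.yaml` below is a hypothetical placeholder; keys
# such as `name`, `config_type`, `task`, and `prediction_skip_tokens` are
# bookkeeping for the surrounding framework and are not `BertConfig` fields.
#
#   import yaml
#   from transformers import BertConfig, BertForTokenClassification
#
#   with open("bert_sequence_labeling.yaml") as f:
#       cfg = yaml.safe_load(f)
#
#   # Map the YAML fields onto BertConfig; id2label keys load as ints,
#   # which is what transformers expects.
#   bert_cfg = BertConfig(
#       vocab_size=cfg["vocab_size"],
#       hidden_size=cfg["hidden_size"],
#       num_hidden_layers=cfg["num_hidden_layers"],
#       num_attention_heads=cfg["num_attention_heads"],
#       intermediate_size=cfg["intermediate_size"],
#       hidden_act=cfg["hidden_act"],
#       hidden_dropout_prob=cfg["hidden_dropout_prob"],
#       attention_probs_dropout_prob=cfg["attention_probs_dropout_prob"],
#       max_position_embeddings=cfg["max_position_embeddings"],
#       type_vocab_size=cfg["type_vocab_size"],
#       initializer_range=cfg["initializer_range"],
#       layer_norm_eps=cfg["layer_norm_eps"],
#       pad_token_id=cfg["pad_token_id"],
#       position_embedding_type=cfg["position_embedding_type"],
#       use_cache=cfg["use_cache"],
#       num_labels=cfg["num_labels"],
#       id2label=cfg["id2label"],
#       label2id={v: k for k, v in cfg["id2label"].items()},
#   )
#   model = BertForTokenClassification(bert_cfg)
#
# At inference time, predictions for the tokens listed in
# `prediction_skip_tokens` ('[SEP]', '[CLS]') would be dropped before
# aligning labels with the input words.
# ---------------------------------------------------------------------------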