name: bert_text_classification
config_type: model
task: TEXT_CLASSIFICATION
num_labels: 3
id2label:
  0: negative
  1: positive
  2: neutral
vocab_size: 42000
hidden_size: 768
num_hidden_layers: 12
num_attention_heads: 12
intermediate_size: 3072
hidden_act: gelu
hidden_dropout_prob: 0.1
attention_probs_dropout_prob: 0.1
max_position_embeddings: 512
type_vocab_size: 2
initializer_range: 0.02
layer_norm_eps: 1.0e-12
pad_token_id: 0
position_embedding_type: absolute
use_cache: true
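
The keys `name`, `config_type`, and `task` look like framework-specific metadata, while the remaining fields are standard BERT hyperparameters. As a hedged illustration (not part of the original config), the sketch below shows how those hyperparameters could be mapped onto the Hugging Face `transformers` `BertConfig` and used to instantiate a randomly initialized classifier. The file name `model_config.yaml`, and the choice of PyYAML and `transformers` for loading, are assumptions made only for this example.

```python
# Minimal sketch, assuming the config above is saved as model_config.yaml
# (hypothetical file name) and that mapping it onto transformers.BertConfig
# is acceptable; the original file may target a different framework.
import yaml
from transformers import BertConfig, BertForSequenceClassification

with open("model_config.yaml") as f:
    raw = yaml.safe_load(f)

# Drop metadata keys that BertConfig does not use.
for key in ("name", "config_type", "task"):
    raw.pop(key, None)

# YAML loads the id2label keys as ints; normalize the labels to plain strings.
raw["id2label"] = {int(k): str(v) for k, v in raw["id2label"].items()}

config = BertConfig(**raw)                      # 12 layers, hidden size 768, 3 labels
model = BertForSequenceClassification(config)   # randomly initialized, no pretrained weights

print(model.config.num_labels)  # 3
```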