{ "_name_or_path": "deberta-v3-large-bc5cdr", "architectures": [ "DebertaV2ForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "crf_state_dict": { "_constraint_mask": [ [ 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0 ], [ 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0 ], [ 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0 ], [ 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0 ], [ 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0 ], [ 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ], "end_transitions": [ 1.0797829627990723, 2.337775468826294, -0.5702832341194153, -0.09888350963592529, 0.44394630193710327 ], "start_transitions": [ 1.050474762916565, -0.9998553395271301, 0.6489607691764832, -1.3081002235412598, 0.8147163391113281 ], "transitions": [ [ 0.38620230555534363, 0.46338361501693726, -0.2693702280521393, -0.027202147990465164, 0.2154194712638855 ], [ -0.24631035327911377, 0.7250676155090332, -0.05480072647333145, 0.05186671018600464, 0.564211368560791 ], [ 0.12546269595623016, -0.22872965037822723, -0.1702175885438919, -0.40474018454551697, 0.6381494402885437 ], [ 0.5764445066452026, 0.6380641460418701, -0.6308369636535645, 0.9163250923156738, 0.7476255893707275 ], [ -0.14198975265026093, 0.41760584712028503, 0.3615993857383728, 0.5377429723739624, 0.029010772705078125 ] ] }, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "O", "1": "B-Chemical", "2": "B-Disease", "3": "I-Disease", "4": "I-Chemical" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "B-Chemical": 1, "B-Disease": 2, "I-Chemical": 4, "I-Disease": 3, "O": 0 }, "layer_norm_eps": 1e-07, "max_position_embeddings": 512, "max_relative_positions": -1, "model_type": "deberta-v2", "norm_rel_ebd": "layer_norm", "num_attention_heads": 16, "num_hidden_layers": 24, "pad_token_id": 0, "pooler_dropout": 0, "pooler_hidden_act": "gelu", "pooler_hidden_size": 1024, "pos_att_type": [ "p2c", "c2p" ], "position_biased_input": false, "position_buckets": 256, "relative_attention": true, "share_att_key": true, "torch_dtype": "float32", "transformers_version": "4.11.3", "type_vocab_size": 0, "vocab_size": 128100 }