{ "_name_or_path": "microsoft/deberta-base", "architectures": [ "DebertaForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "O", "1": "I-pickup_state", "2": "I-pickup_cap", "3": "I-measures", "4": "I-package_type", "5": "B-pickup_state", "6": "B-total_weight", "7": "I-company", "8": "B-total_quantity", "9": "B-pickup_port", "10": "B-company", "11": "I-commodity", "12": "I-delivery_state", "13": "I-quantity", "14": "I-pickup_location", "15": "B-quantity", "16": "B-commodity", "17": "B-measures", "18": "B-pickup_cap", "19": "B-stackable", "20": "B-weight", "21": "B-package_type", "22": "I-pickup_port", "23": "I-stackable", "24": "B-delivery_state", "25": "B-total_volume", "26": "I-total_weight", "27": "B-delivery_port", "28": "B-incoterms", "29": "I-delivery_location", "30": "B-volume", "31": "I-delivery_port", "32": "B-delivery_cap", "33": "I-total_volume", "34": "I-volume", "35": "I-weight", "36": "I-incoterms", "37": "B-pickup_location", "38": "B-delivery_location" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-commodity": 16, "B-company": 10, "B-delivery_cap": 32, "B-delivery_location": 38, "B-delivery_port": 27, "B-delivery_state": 24, "B-incoterms": 28, "B-measures": 17, "B-package_type": 21, "B-pickup_cap": 18, "B-pickup_location": 37, "B-pickup_port": 9, "B-pickup_state": 5, "B-quantity": 15, "B-stackable": 19, "B-total_quantity": 8, "B-total_volume": 25, "B-total_weight": 6, "B-volume": 30, "B-weight": 20, "I-commodity": 11, "I-company": 7, "I-delivery_location": 29, "I-delivery_port": 31, "I-delivery_state": 12, "I-incoterms": 36, "I-measures": 3, "I-package_type": 4, "I-pickup_cap": 2, "I-pickup_location": 14, "I-pickup_port": 22, "I-pickup_state": 1, "I-quantity": 13, "I-stackable": 23, "I-total_volume": 33, "I-total_weight": 26, "I-volume": 34, "I-weight": 35, "O": 0 }, "layer_norm_eps": 1e-07, "max_position_embeddings": 512, "max_relative_positions": -1, "model_type": "deberta", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "pooler_dropout": 0, "pooler_hidden_act": "gelu", "pooler_hidden_size": 768, "pos_att_type": [ "c2p", "p2c" ], "position_biased_input": false, "relative_attention": true, "torch_dtype": "float32", "transformers_version": "4.38.2", "type_vocab_size": 0, "vocab_size": 50265 }