{
"_name_or_path": "bert-base-cased",
"architectures": [
"BertForBiaffineParsing"
],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": null,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "_",
"1": "acl",
"2": "advcl",
"3": "advmod",
"4": "amod",
"5": "appos",
"6": "aux",
"7": "case",
"8": "cc",
"9": "ccomp",
"10": "clf",
"11": "compound",
"12": "conj",
"13": "cop",
"14": "csubj",
"15": "dep",
"16": "det",
"17": "discourse",
"18": "dislocated",
"19": "expl",
"20": "fixed",
"21": "flat",
"22": "goeswith",
"23": "iobj",
"24": "list",
"25": "mark",
"26": "nmod",
"27": "nsubj",
"28": "nummod",
"29": "obj",
"30": "obl",
"31": "orphan",
"32": "parataxis",
"33": "punct",
"34": "reparandum",
"35": "root",
"36": "vocative",
"37": "xcomp"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"_": 0,
"acl": 1,
"advcl": 2,
"advmod": 3,
"amod": 4,
"appos": 5,
"aux": 6,
"case": 7,
"cc": 8,
"ccomp": 9,
"clf": 10,
"compound": 11,
"conj": 12,
"cop": 13,
"csubj": 14,
"dep": 15,
"det": 16,
"discourse": 17,
"dislocated": 18,
"expl": 19,
"fixed": 20,
"flat": 21,
"goeswith": 22,
"iobj": 23,
"list": 24,
"mark": 25,
"nmod": 26,
"nsubj": 27,
"nummod": 28,
"obj": 29,
"obl": 30,
"orphan": 31,
"parataxis": 32,
"punct": 33,
"reparandum": 34,
"root": 35,
"vocative": 36,
"xcomp": 37
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.17.0",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 28996
}