{
"architectures": [
"HierarchicalModelForTokenClassification"
],
"finetuning_task": "token_classification",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "paragraph",
"1": "title",
"2": "equation",
"3": "reference",
"4": "section",
"5": "list",
"6": "table",
"7": "caption",
"8": "author",
"9": "abstract",
"10": "footer",
"11": "date",
"12": "figure"
},
"initializer_range": 0.02,
"label2id": {
"abstract": 9,
"author": 8,
"caption": 7,
"date": 11,
"equation": 2,
"figure": 12,
"footer": 10,
"list": 5,
"paragraph": 0,
"reference": 3,
"section": 4,
"table": 6,
"title": 1
},
"load_weights_from_existing_model": false,
"model_type": "hierarchical_model",
"pad_token_id": 0,
"textline_encoder_output": "average",
"textline_encoder_type": "bert-layer",
"textline_encoder_used_bert_layer": "first",
"textline_model_type": "layoutlm-base-uncased",
"textline_model_used_bert_layer": "first",
"transformers_version": "4.7.0",
"vila_preprocessor_config": {
"agg_level": "row",
"group_bbox_agg": "first"
},
"vocab_size": 30522
}