{
  "architectures": [
    "LayoutLMForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "finetuning_task": "token_classification",
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "Title",
    "1": "Author",
    "2": "Abstract",
    "3": "Keywords",
    "4": "Section",
    "5": "Paragraph",
    "6": "List",
    "7": "Bibliography",
    "8": "Equation",
    "9": "Algorithm",
    "10": "Figure",
    "11": "Table",
    "12": "Caption",
    "13": "Header",
    "14": "Footer",
    "15": "Footnote"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "Abstract": "2",
    "Algorithm": "9",
    "Author": "1",
    "Bibliography": "7",
    "Caption": "12",
    "Equation": "8",
    "Figure": "10",
    "Footer": "14",
    "Footnote": "15",
    "Header": "13",
    "Keywords": "3",
    "List": "6",
    "Paragraph": "5",
    "Section": "4",
    "Table": "11",
    "Title": "0"
  },
  "layer_norm_eps": 1e-12,
  "max_2d_position_embeddings": 1024,
  "max_position_embeddings": 512,
  "model_type": "layoutlm",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "transformers_version": "4.6.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vila_preprocessor_config": {
    "added_special_sepration_token": "[BLK]",
    "agg_level": "row",
    "group_bbox_agg": "first"
  },
  "vocab_size": 30522
}
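This config describes a standard 12-layer LayoutLM base model fine-tuned for token classification over 16 document-layout labels; the `vila_preprocessor_config` block additionally indicates VILA-style preprocessing that groups tokens at the row level and injects `[BLK]` separator tokens. Below is a minimal inference sketch using the Hugging Face `transformers` classes named in the config. The repo id is a placeholder (this file's actual repository name is not shown here), and the bounding boxes are dummy values; LayoutLM expects each token's box normalized to the 0-1000 range, consistent with `max_2d_position_embeddings` above.

import torch
from transformers import LayoutLMForTokenClassification, LayoutLMTokenizer

repo_id = "user/layoutlm-vila-row"  # hypothetical placeholder, not the real repo id

tokenizer = LayoutLMTokenizer.from_pretrained(repo_id)
model = LayoutLMForTokenClassification.from_pretrained(repo_id)
model.eval()

# Dummy words and per-word boxes (x0, y0, x1, y1), normalized to 0-1000.
words = ["A", "Study", "of", "Layout"]
word_boxes = [[70, 52, 108, 64], [114, 52, 178, 64], [184, 52, 204, 64], [210, 52, 268, 64]]

# Tokenize each word separately so its box can be repeated for every subword.
tokens, boxes = [], []
for word, box in zip(words, word_boxes):
    subwords = tokenizer.tokenize(word)
    tokens.extend(subwords)
    boxes.extend([box] * len(subwords))

# Conventional special-token boxes: [0,0,0,0] for [CLS], [1000,1000,1000,1000] for [SEP].
all_tokens = [tokenizer.cls_token] + tokens + [tokenizer.sep_token]
input_ids = torch.tensor([tokenizer.convert_tokens_to_ids(all_tokens)])
bbox = torch.tensor([[[0, 0, 0, 0]] + boxes + [[1000, 1000, 1000, 1000]]])
attention_mask = torch.ones_like(input_ids)

with torch.no_grad():
    logits = model(input_ids=input_ids, bbox=bbox, attention_mask=attention_mask).logits

# Map predicted class indices back through the id2label table above.
pred_ids = logits.argmax(-1)[0].tolist()
print([(tok, model.config.id2label[i]) for tok, i in zip(all_tokens, pred_ids)])

Note that `label2id` values are stored as strings in this file; `transformers` normalizes `id2label` keys to integers on load, which is why the integer lookup in the last line works.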