config
config.json (+1 -30) CHANGED
|
@@ -27,33 +27,4 @@
   "use_cache": true,
   "vocab_size": 50265
 }
-
-// "_name_or_path": "microsoft/layoutlm-large-uncased",
-// "architectures": [
-// "LayoutLMForQuestionAnswering"
-// ],
-// "attention_probs_dropout_prob": 0.1,
-// "bos_token_id": 0,
-// "classifier_dropout": null,
-// "eos_token_id": 2,
-// "gradient_checkpointing": false,
-// "hidden_act": "gelu",
-// "hidden_dropout_prob": 0.1,
-// "hidden_size": 768,
-// "initializer_range": 0.02,
-// "intermediate_size": 3072,
-// "layer_norm_eps": 1e-05,
-// "max_2d_position_embeddings": 1024,
-// "max_position_embeddings": 514,
-// "model_type": "roberta",
-// "num_attention_heads": 12,
-// "num_hidden_layers": 12,
-// "pad_token_id": 1,
-// "position_embedding_type": "absolute",
-// "tokenizer_class": "RobertaTokenizer",
-// "torch_dtype": "float32",
-// "transformers_version": "4.23.1",
-// "type_vocab_size": 2,
-// "use_cache": true,
-// "vocab_size": 50265
-// }
+
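
Note (a sketch, not part of the commit): the removed block used //-style comments and sat after the closing brace, and strict JSON parsers reject both, so before this change json.load would fail with "Extra data". A minimal check against a local checkout of the repository; the directory path is a placeholder, and it assumes the fields above line 27 of config.json (outside this hunk) include a valid model_type.

import json

from transformers import AutoConfig

# After this commit, config.json is valid strict JSON; the pre-commit
# file would raise json.JSONDecodeError here because of the trailing
# //-commented block after the closing brace.
with open("config.json", encoding="utf-8") as f:
    cfg = json.load(f)

print(cfg["use_cache"])   # True
print(cfg["vocab_size"])  # 50265

# Loading through transformers; "." is a placeholder for a local
# checkout containing this config.json.
config = AutoConfig.from_pretrained(".")
print(config.use_cache, config.vocab_size)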