Training in progress, epoch 1
- README.md +80 -0
- config.json +43 -0
- logs/events.out.tfevents.1751938684.7a9bc2c7ed3e.2194.0 +3 -0
- logs/events.out.tfevents.1751940015.7a9bc2c7ed3e.19205.0 +3 -0
- model.safetensors +3 -0
- preprocessor_config.json +13 -0
- special_tokens_map.json +37 -0
- tokenizer.json +0 -0
- tokenizer_config.json +81 -0
- training_args.bin +3 -0
- vocab.txt +0 -0
README.md
ADDED
@@ -0,0 +1,80 @@
---
library_name: transformers
license: mit
base_model: microsoft/layoutlm-base-uncased
tags:
- generated_from_trainer
model-index:
- name: layoutlm-funsd
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# layoutlm-funsd

This model is a fine-tuned version of [microsoft/layoutlm-base-uncased](https://huggingface.co/microsoft/layoutlm-base-uncased) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.7253
- Answer: {'precision': 0.70995670995671, 'recall': 0.8108776266996292, 'f1': 0.7570686670513561, 'number': 809}
- Header: {'precision': 0.3333333333333333, 'recall': 0.3445378151260504, 'f1': 0.33884297520661155, 'number': 119}
- Question: {'precision': 0.7931034482758621, 'recall': 0.8422535211267606, 'f1': 0.8169398907103825, 'number': 1065}
- Overall Precision: 0.7319
- Overall Recall: 0.7998
- Overall F1: 0.7643
- Overall Accuracy: 0.8025

## Model description

More information needed

## Intended uses & limitations

More information needed

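In lieu of a full description, a minimal inference sketch: the checkpoint ships a `LayoutLMv2Tokenizer` (see tokenizer_config.json below), which takes pre-split words plus bounding boxes normalized to 0–1000. The repository path, words, and boxes here are placeholders, not values from this repo.

```python
import torch
from transformers import AutoModelForTokenClassification, AutoTokenizer

# Placeholder path; substitute the actual Hub id of this checkpoint.
checkpoint = "layoutlm-funsd"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForTokenClassification.from_pretrained(checkpoint)

# Hypothetical OCR output: words with boxes normalized to 0-1000.
words = ["Date:", "July", "8,", "2025"]
boxes = [[60, 40, 130, 60], [140, 40, 190, 60], [195, 40, 225, 60], [230, 40, 290, 60]]

encoding = tokenizer(words, boxes=boxes, truncation=True, return_tensors="pt")
with torch.no_grad():
    logits = model(**encoding).logits

# Predictions cover every token, including [CLS]/[SEP] and subword pieces.
predicted_ids = logits.argmax(-1).squeeze().tolist()
labels = [model.config.id2label[i] for i in predicted_ids]  # e.g. B-QUESTION, B-ANSWER, O
```
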
## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 3e-05
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: ADAMW_TORCH with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 15
- mixed_precision_training: Native AMP

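The exact arguments are serialized in training_args.bin; as a rough sketch, the listed values correspond to a `TrainingArguments` along these lines (the output directory and anything not listed above are assumptions):

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="layoutlm-funsd",        # assumed; not read from training_args.bin
    learning_rate=3e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    seed=42,
    optim="adamw_torch",
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    num_train_epochs=15,
    fp16=True,                          # Native AMP mixed precision
)
```
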
### Training results

| Training Loss | Epoch | Step | Validation Loss | Answer | Header | Question | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:--------:|:-----------------:|:--------------:|:----------:|:----------------:|
| 1.8167 | 1.0 | 10 | 1.6314 | {'precision': 0.01308139534883721, 'recall': 0.011124845488257108, 'f1': 0.012024048096192386, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.14820143884892087, 'recall': 0.09671361502347418, 'f1': 0.11704545454545455, 'number': 1065} | 0.0810 | 0.0562 | 0.0664 | 0.3378 |
| 1.4882 | 2.0 | 20 | 1.3037 | {'precision': 0.1090686274509804, 'recall': 0.1100123609394314, 'f1': 0.10953846153846154, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.39293598233995586, 'recall': 0.5014084507042254, 'f1': 0.4405940594059406, 'number': 1065} | 0.2864 | 0.3126 | 0.2989 | 0.5571 |
| 1.1329 | 3.0 | 30 | 0.9825 | {'precision': 0.46, 'recall': 0.511742892459827, 'f1': 0.4844938560561732, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.5010366275051832, 'recall': 0.6807511737089202, 'f1': 0.5772292993630574, 'number': 1065} | 0.4774 | 0.5715 | 0.5202 | 0.6939 |
| 0.8727 | 4.0 | 40 | 0.8129 | {'precision': 0.5495951417004049, 'recall': 0.6711990111248455, 'f1': 0.6043405676126878, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.626418152350081, 'recall': 0.7258215962441315, 'f1': 0.6724662896911702, 'number': 1065} | 0.5774 | 0.6603 | 0.6161 | 0.7443 |
| 0.6968 | 5.0 | 50 | 0.7437 | {'precision': 0.5873362445414847, 'recall': 0.6650185414091471, 'f1': 0.6237681159420291, 'number': 809} | {'precision': 0.1717171717171717, 'recall': 0.14285714285714285, 'f1': 0.15596330275229356, 'number': 119} | {'precision': 0.6337295690936107, 'recall': 0.8009389671361502, 'f1': 0.7075902115304852, 'number': 1065} | 0.5964 | 0.7065 | 0.6468 | 0.7722 |
| 0.5935 | 6.0 | 60 | 0.6977 | {'precision': 0.6307541625857003, 'recall': 0.796044499381953, 'f1': 0.7038251366120217, 'number': 809} | {'precision': 0.23170731707317074, 'recall': 0.15966386554621848, 'f1': 0.1890547263681592, 'number': 119} | {'precision': 0.7163920208152645, 'recall': 0.7755868544600939, 'f1': 0.7448151487826872, 'number': 1065} | 0.6600 | 0.7471 | 0.7009 | 0.7850 |
| 0.5186 | 7.0 | 70 | 0.6795 | {'precision': 0.6838709677419355, 'recall': 0.7861557478368356, 'f1': 0.7314548591144335, 'number': 809} | {'precision': 0.31958762886597936, 'recall': 0.2605042016806723, 'f1': 0.28703703703703703, 'number': 119} | {'precision': 0.7390557939914163, 'recall': 0.8084507042253521, 'f1': 0.7721973094170403, 'number': 1065} | 0.6971 | 0.7667 | 0.7302 | 0.7968 |
| 0.4576 | 8.0 | 80 | 0.6670 | {'precision': 0.6711340206185566, 'recall': 0.8046971569839307, 'f1': 0.7318718381112984, 'number': 809} | {'precision': 0.25225225225225223, 'recall': 0.23529411764705882, 'f1': 0.2434782608695652, 'number': 119} | {'precision': 0.7412765957446809, 'recall': 0.8178403755868544, 'f1': 0.7776785714285714, 'number': 1065} | 0.6871 | 0.7777 | 0.7296 | 0.8010 |
| 0.3975 | 9.0 | 90 | 0.6732 | {'precision': 0.6915584415584416, 'recall': 0.7898640296662547, 'f1': 0.7374495095210617, 'number': 809} | {'precision': 0.272, 'recall': 0.2857142857142857, 'f1': 0.27868852459016397, 'number': 119} | {'precision': 0.7510548523206751, 'recall': 0.8356807511737089, 'f1': 0.7911111111111111, 'number': 1065} | 0.6996 | 0.7842 | 0.7395 | 0.8006 |
| 0.3865 | 10.0 | 100 | 0.6818 | {'precision': 0.6941798941798942, 'recall': 0.8108776266996292, 'f1': 0.7480045610034207, 'number': 809} | {'precision': 0.28695652173913044, 'recall': 0.2773109243697479, 'f1': 0.2820512820512821, 'number': 119} | {'precision': 0.7790492957746479, 'recall': 0.8309859154929577, 'f1': 0.8041799182189914, 'number': 1065} | 0.7168 | 0.7898 | 0.7515 | 0.8007 |
| 0.3278 | 11.0 | 110 | 0.6996 | {'precision': 0.7050053248136315, 'recall': 0.8182941903584673, 'f1': 0.7574370709382151, 'number': 809} | {'precision': 0.325, 'recall': 0.3277310924369748, 'f1': 0.3263598326359833, 'number': 119} | {'precision': 0.7785527462946817, 'recall': 0.8384976525821596, 'f1': 0.8074141048824593, 'number': 1065} | 0.7226 | 0.7998 | 0.7592 | 0.8015 |
| 0.3097 | 12.0 | 120 | 0.7068 | {'precision': 0.7093649085037675, 'recall': 0.8145859085290482, 'f1': 0.7583429228998849, 'number': 809} | {'precision': 0.32456140350877194, 'recall': 0.31092436974789917, 'f1': 0.31759656652360513, 'number': 119} | {'precision': 0.7866666666666666, 'recall': 0.8309859154929577, 'f1': 0.8082191780821917, 'number': 1065} | 0.7292 | 0.7933 | 0.7599 | 0.8008 |
| 0.2962 | 13.0 | 130 | 0.7236 | {'precision': 0.7118463180362861, 'recall': 0.8244746600741656, 'f1': 0.7640320733104239, 'number': 809} | {'precision': 0.3217391304347826, 'recall': 0.31092436974789917, 'f1': 0.3162393162393162, 'number': 119} | {'precision': 0.7907801418439716, 'recall': 0.8375586854460094, 'f1': 0.8134974920200638, 'number': 1065} | 0.7321 | 0.8008 | 0.7649 | 0.8050 |
| 0.2711 | 14.0 | 140 | 0.7240 | {'precision': 0.7082429501084598, 'recall': 0.8071693448702101, 'f1': 0.754477180820335, 'number': 809} | {'precision': 0.3333333333333333, 'recall': 0.3445378151260504, 'f1': 0.33884297520661155, 'number': 119} | {'precision': 0.7931034482758621, 'recall': 0.8422535211267606, 'f1': 0.8169398907103825, 'number': 1065} | 0.7312 | 0.7983 | 0.7633 | 0.8017 |
| 0.2721 | 15.0 | 150 | 0.7253 | {'precision': 0.70995670995671, 'recall': 0.8108776266996292, 'f1': 0.7570686670513561, 'number': 809} | {'precision': 0.3333333333333333, 'recall': 0.3445378151260504, 'f1': 0.33884297520661155, 'number': 119} | {'precision': 0.7931034482758621, 'recall': 0.8422535211267606, 'f1': 0.8169398907103825, 'number': 1065} | 0.7319 | 0.7998 | 0.7643 | 0.8025 |

### Framework versions

- Transformers 4.53.0
- Pytorch 2.6.0+cu124
- Datasets 3.6.0
- Tokenizers 0.21.2
config.json
ADDED
@@ -0,0 +1,43 @@
{
  "architectures": [
    "LayoutLMForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "B-HEADER",
    "2": "I-HEADER",
    "3": "B-QUESTION",
    "4": "I-QUESTION",
    "5": "B-ANSWER",
    "6": "I-ANSWER"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-ANSWER": 5,
    "B-HEADER": 1,
    "B-QUESTION": 3,
    "I-ANSWER": 6,
    "I-HEADER": 2,
    "I-QUESTION": 4,
    "O": 0
  },
  "layer_norm_eps": 1e-12,
  "max_2d_position_embeddings": 1024,
  "max_position_embeddings": 512,
  "model_type": "layoutlm",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.53.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 30522
}
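A short sketch of what this configuration implies once loaded (the repo path is a placeholder): the seven BIO labels above are exposed through `AutoConfig` and drive the token-classification head size.

```python
from transformers import AutoConfig

# Placeholder path; substitute the actual Hub id of this checkpoint.
config = AutoConfig.from_pretrained("layoutlm-funsd")

print(config.model_type)   # "layoutlm"
print(config.num_labels)   # 7: O plus B-/I- tags for HEADER, QUESTION, ANSWER
print(config.id2label[5])  # "B-ANSWER"
```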
logs/events.out.tfevents.1751938684.7a9bc2c7ed3e.2194.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0af8117fa961570b460b43f4620b0ad74fb7adb2da0c65c27e0fbccc53552670
size 16149
logs/events.out.tfevents.1751940015.7a9bc2c7ed3e.19205.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b6e5c213d8744385f505862580cdbcda009c59227d2df591b073a32fa05cf3f8
size 5928
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7d95fb3ac4eba2df4fc8a90038fca061ff5236adb4c7c044a51b48b2c55eb5ac
size 450558212
preprocessor_config.json
ADDED
@@ -0,0 +1,13 @@
{
  "apply_ocr": true,
  "do_resize": true,
  "image_processor_type": "LayoutLMv2ImageProcessor",
  "ocr_lang": null,
  "processor_class": "LayoutLMv2Processor",
  "resample": 2,
  "size": {
    "height": 224,
    "width": 224
  },
  "tesseract_config": ""
}
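A minimal sketch of what this preprocessor config does at inference time (the image path is a placeholder, and `apply_ocr: true` requires the Tesseract binary plus `pytesseract` to be installed):

```python
from PIL import Image
from transformers import LayoutLMv2ImageProcessor

# Mirrors preprocessor_config.json: resize to 224x224 and run Tesseract OCR.
image_processor = LayoutLMv2ImageProcessor(
    apply_ocr=True,
    size={"height": 224, "width": 224},
)

image = Image.open("form.png").convert("RGB")  # placeholder document image
features = image_processor(image)

words = features["words"][0]  # OCR'd words for the first (only) image
boxes = features["boxes"][0]  # matching boxes, normalized to 0-1000
```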
special_tokens_map.json
ADDED
@@ -0,0 +1,37 @@
{
  "cls_token": {
    "content": "[CLS]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "mask_token": {
    "content": "[MASK]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "[PAD]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "sep_token": {
    "content": "[SEP]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "[UNK]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json
ADDED
@@ -0,0 +1,81 @@
{
  "added_tokens_decoder": {
    "0": {
      "content": "[PAD]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100": {
      "content": "[UNK]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "101": {
      "content": "[CLS]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "102": {
      "content": "[SEP]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "103": {
      "content": "[MASK]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [],
  "apply_ocr": false,
  "clean_up_tokenization_spaces": false,
  "cls_token": "[CLS]",
  "cls_token_box": [
    0,
    0,
    0,
    0
  ],
  "do_basic_tokenize": true,
  "do_lower_case": true,
  "extra_special_tokens": {},
  "mask_token": "[MASK]",
  "model_max_length": 512,
  "never_split": null,
  "only_label_first_subword": true,
  "pad_token": "[PAD]",
  "pad_token_box": [
    0,
    0,
    0,
    0
  ],
  "pad_token_label": -100,
  "processor_class": "LayoutLMv2Processor",
  "sep_token": "[SEP]",
  "sep_token_box": [
    1000,
    1000,
    1000,
    1000
  ],
  "strip_accents": null,
  "tokenize_chinese_chars": true,
  "tokenizer_class": "LayoutLMv2Tokenizer",
  "unk_token": "[UNK]"
}
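A sketch of how the settings above behave when encoding labelled words (the repo path, words, and boxes are placeholders; the label ids follow the `label2id` map in config.json):

```python
from transformers import AutoTokenizer

# Placeholder path; substitute the actual Hub id of this checkpoint.
tokenizer = AutoTokenizer.from_pretrained("layoutlm-funsd")

words = ["Invoice", "number:", "12345"]
boxes = [[50, 50, 150, 70], [160, 50, 260, 70], [270, 50, 340, 70]]
word_labels = [0, 3, 5]  # O, B-QUESTION, B-ANSWER

enc = tokenizer(words, boxes=boxes, word_labels=word_labels, return_tensors="pt")

# only_label_first_subword=true: only the first subword of each word keeps its
# label; remaining subwords and [CLS]/[SEP]/[PAD] get pad_token_label (-100),
# while the special tokens receive cls_token_box / sep_token_box / pad_token_box.
print(enc["labels"])
print(enc["bbox"])
```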
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dd2c2fe385e999ddef639494141bfa60744af951e58f6f6c06405a7336242a2c
size 5368
vocab.txt
ADDED
The diff for this file is too large to render. See raw diff.