WayneChiu committed on
Commit
cc52677
1 Parent(s): dcdbb0f

Training done

Browse files
added_tokens.json ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "</s_ocr>": 57604,
3
+ "</s_一般诊疗费>": 57596,
4
+ "</s_个人现金支付>": 57582,
5
+ "</s_个人自付>": 57580,
6
+ "</s_个人账户支付>": 57578,
7
+ "</s_中成药费>": 57594,
8
+ "</s_中药饮片>": 57600,
9
+ "</s_价税合计(大写)>": 57586,
10
+ "</s_住院天数>": 57534,
11
+ "</s_入院日期>": 57532,
12
+ "</s_其他支付>": 57576,
13
+ "</s_出院日期>": 57530,
14
+ "</s_化验费>": 57574,
15
+ "</s_医保类型>": 57572,
16
+ "</s_医保统筹基金支付>": 57570,
17
+ "</s_医疗机构类型>": 57568,
18
+ "</s_医院名称>": 57528,
19
+ "</s_卫生材料费>": 57566,
20
+ "</s_合计金额(大写)>": 57564,
21
+ "</s_复核人>": 57562,
22
+ "</s_年龄>": 57526,
23
+ "</s_床位费>": 57560,
24
+ "</s_开票日期>": 57558,
25
+ "</s_性别>": 57598,
26
+ "</s_手术费>": 57556,
27
+ "</s_护理费>": 57554,
28
+ "</s_挂号费>": 57592,
29
+ "</s_收款人>": 57552,
30
+ "</s_收款单位>": 57550,
31
+ "</s_校验码>": 57548,
32
+ "</s_检查费>": 57546,
33
+ "</s_治疗费>": 57544,
34
+ "</s_票据代码>": 57542,
35
+ "</s_票据号码>": 57540,
36
+ "</s_统筹支付>": 57602,
37
+ "</s_自付一>": 57590,
38
+ "</s_自付二>": 57588,
39
+ "</s_西药费>": 57538,
40
+ "</s_诊查费>": 57584,
41
+ "</s_(小写)>": 57536,
42
+ "<NULL>": 57605,
43
+ "<s_iitcdip>": 57523,
44
+ "<s_ocr>": 57603,
45
+ "<s_synthdog>": 57524,
46
+ "<s_一般诊疗费>": 57595,
47
+ "<s_个人现金支付>": 57581,
48
+ "<s_个人自付>": 57579,
49
+ "<s_个人账户支付>": 57577,
50
+ "<s_中成药费>": 57593,
51
+ "<s_中药饮片>": 57599,
52
+ "<s_价税合计(大写)>": 57585,
53
+ "<s_住院天数>": 57533,
54
+ "<s_入院日期>": 57531,
55
+ "<s_其他支付>": 57575,
56
+ "<s_出院日期>": 57529,
57
+ "<s_化验费>": 57573,
58
+ "<s_医保类型>": 57571,
59
+ "<s_医保统筹基金支付>": 57569,
60
+ "<s_医疗机构类型>": 57567,
61
+ "<s_医院名称>": 57527,
62
+ "<s_卫生材料费>": 57565,
63
+ "<s_合计金额(大写)>": 57563,
64
+ "<s_复核人>": 57561,
65
+ "<s_年龄>": 57525,
66
+ "<s_床位费>": 57559,
67
+ "<s_开票日期>": 57557,
68
+ "<s_性别>": 57597,
69
+ "<s_手术费>": 57555,
70
+ "<s_护理费>": 57553,
71
+ "<s_挂号费>": 57591,
72
+ "<s_收款人>": 57551,
73
+ "<s_收款单位>": 57549,
74
+ "<s_校验码>": 57547,
75
+ "<s_检查费>": 57545,
76
+ "<s_治疗费>": 57543,
77
+ "<s_票据代码>": 57541,
78
+ "<s_票据号码>": 57539,
79
+ "<s_统筹支付>": 57601,
80
+ "<s_自付一>": 57589,
81
+ "<s_自付二>": 57587,
82
+ "<s_西药费>": 57537,
83
+ "<s_诊查费>": 57583,
84
+ "<s_(小写)>": 57535,
85
+ "<sep/>": 57522
86
+ }
preprocessor_config.json ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "do_align_long_axis": false,
3
+ "do_normalize": true,
4
+ "do_pad": true,
5
+ "do_resize": true,
6
+ "do_thumbnail": true,
7
+ "feature_extractor_type": "DonutFeatureExtractor",
8
+ "image_mean": [
9
+ 0.5,
10
+ 0.5,
11
+ 0.5
12
+ ],
13
+ "image_std": [
14
+ 0.5,
15
+ 0.5,
16
+ 0.5
17
+ ],
18
+ "processor_class": "DonutProcessor",
19
+ "resample": 2,
20
+ "size": [
21
+ 960,
22
+ 1280
23
+ ]
24
+ }
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cb9e3dce4c326195d08fc3dd0f7e2eee1da8595c847bf4c1a9c78b7a82d47e2d
3
+ size 1296245
special_tokens_map.json ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<s_iitcdip>",
4
+ "<s_synthdog>"
5
+ ],
6
+ "bos_token": "<s>",
7
+ "cls_token": "<s>",
8
+ "eos_token": "</s>",
9
+ "mask_token": {
10
+ "content": "<mask>",
11
+ "lstrip": true,
12
+ "normalized": true,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": "<pad>",
17
+ "sep_token": "</s>",
18
+ "unk_token": "<unk>"
19
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": "<s>",
3
+ "cls_token": "<s>",
4
+ "eos_token": "</s>",
5
+ "mask_token": {
6
+ "__type": "AddedToken",
7
+ "content": "<mask>",
8
+ "lstrip": true,
9
+ "normalized": true,
10
+ "rstrip": false,
11
+ "single_word": false
12
+ },
13
+ "name_or_path": "nielsr/donut-base",
14
+ "pad_token": "<pad>",
15
+ "processor_class": "DonutProcessor",
16
+ "sep_token": "</s>",
17
+ "sp_model_kwargs": {},
18
+ "special_tokens_map_file": null,
19
+ "tokenizer_class": "XLMRobertaTokenizer",
20
+ "unk_token": "<unk>"
21
+ }