iioSnail commited on
Commit
8d7c576
1 Parent(s): 47e9648
config.json ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "./ChineseBERT-base",
3
+ "architectures": [
4
+ "GlyceBertForMaskedLM"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "auto_map": {
8
+ "AutoModel": "modeling_glycebert.GlyceBertForMaskedLM"
9
+ },
10
+ "classifier_dropout": null,
11
+ "directionality": "bidi",
12
+ "gradient_checkpointing": false,
13
+ "hidden_act": "gelu",
14
+ "hidden_dropout_prob": 0.1,
15
+ "hidden_size": 768,
16
+ "initializer_range": 0.02,
17
+ "intermediate_size": 3072,
18
+ "layer_norm_eps": 1e-12,
19
+ "max_position_embeddings": 512,
20
+ "model_type": "bert",
21
+ "num_attention_heads": 12,
22
+ "num_hidden_layers": 12,
23
+ "output_past": true,
24
+ "pad_token_id": 0,
25
+ "pooler_fc_size": 768,
26
+ "pooler_num_attention_heads": 12,
27
+ "pooler_num_fc_layers": 3,
28
+ "pooler_size_per_head": 128,
29
+ "pooler_type": "first_token_transform",
30
+ "position_embedding_type": "absolute",
31
+ "torch_dtype": "float32",
32
+ "transformers_version": "4.27.1",
33
+ "type_vocab_size": 2,
34
+ "use_cache": true,
35
+ "vocab_size": 23236
36
+ }
config/STFANGSO.TTF24.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:09e11244c473c9272b6e9d2b81aaead7d3adbeb9c4c6fde5c5fe495401ceb065
3
+ size 107071616
config/STXINGKA.TTF24.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:531e568202ceff79cc621775050b36c1c627463fc535fa3231a76c904b886bfd
3
+ size 107071616
config/id2pinyin.json ADDED
The diff for this file is too large to render. See raw diff
 
config/pinyin2tensor.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"ling2": [17, 14, 19, 12, 2, 0, 0, 0], "yuan2": [30, 26, 6, 19, 2, 0, 0, 0], "xing1": [29, 14, 19, 12, 1, 0, 0, 0], "yi1": [30, 14, 1, 0, 0, 0, 0, 0], "yi2": [30, 14, 2, 0, 0, 0, 0, 0], "yi4": [30, 14, 4, 0, 0, 0, 0, 0], "ding1": [9, 14, 19, 12, 1, 0, 0, 0], "zheng1": [31, 13, 10, 19, 12, 1, 0, 0], "qi1": [22, 14, 1, 0, 0, 0, 0, 0], "qi2": [22, 14, 2, 0, 0, 0, 0, 0], "wan4": [28, 6, 19, 4, 0, 0, 0, 0], "mo4": [18, 20, 4, 0, 0, 0, 0, 0], "zhang4": [31, 13, 6, 19, 12, 4, 0, 0], "san1": [24, 6, 19, 1, 0, 0, 0, 0], "shang4": [24, 13, 6, 19, 12, 4, 0, 0], "shang3": [24, 13, 6, 19, 12, 3, 0, 0], "xia4": [29, 14, 6, 4, 0, 0, 0, 0], "bu4": [7, 26, 4, 0, 0, 0, 0, 0], "fou3": [11, 20, 26, 3, 0, 0, 0, 0], "fou1": [11, 20, 26, 1, 0, 0, 0, 0], "fu1": [11, 26, 1, 0, 0, 0, 0, 0], "bu2": [7, 26, 2, 0, 0, 0, 0, 0], "yu3": [30, 26, 3, 0, 0, 0, 0, 0], "yu2": [30, 26, 2, 0, 0, 0, 0, 0], "yu4": [30, 26, 4, 0, 0, 0, 0, 0], "gai4": [12, 6, 14, 4, 0, 0, 0, 0], "chou3": [8, 13, 20, 26, 3, 0, 0, 0], "zhuan1": [31, 13, 26, 6, 19, 1, 0, 0], "qie3": [22, 14, 10, 3, 0, 0, 0, 0], "ju1": [15, 26, 1, 0, 0, 0, 0, 0], "cu2": [8, 26, 2, 0, 0, 0, 0, 0], "pi1": [21, 14, 1, 0, 0, 0, 0, 0], "shi4": [24, 13, 14, 4, 0, 0, 0, 0], "qiu1": [22, 14, 26, 1, 0, 0, 0, 0], "bing3": [7, 14, 19, 12, 3, 0, 0, 0], "bing4": [7, 14, 19, 12, 4, 0, 0, 0], "ye4": [30, 10, 4, 0, 0, 0, 0, 0], "cong2": [8, 20, 19, 12, 2, 0, 0, 0], "dong1": [9, 20, 19, 12, 1, 0, 0, 0], "si1": [24, 14, 1, 0, 0, 0, 0, 0], "cheng2": [8, 13, 10, 19, 12, 2, 0, 0], "sheng4": [24, 13, 10, 19, 12, 4, 0, 0], "zheng3": [31, 13, 10, 19, 12, 3, 0, 0], "diu1": [9, 14, 26, 1, 0, 0, 0, 0], "liang3": [17, 14, 6, 19, 12, 3, 0, 0], "yan2": [30, 6, 19, 2, 0, 0, 0, 0], "ban4": [7, 6, 19, 4, 0, 0, 0, 0], "bang4": [7, 6, 19, 12, 4, 0, 0, 0], "sang4": [24, 6, 19, 12, 4, 0, 0, 0], "sang1": [24, 6, 19, 12, 1, 0, 0, 0], "gun3": [12, 26, 19, 3, 0, 0, 0, 0], "ge4": [12, 10, 4, 0, 0, 0, 0, 0], "gan4": [12, 6, 19, 4, 0, 0, 0, 0], "ge3": [12, 10, 3, 0, 0, 0, 0, 0], 
"ya1": [30, 6, 1, 0, 0, 0, 0, 0], "zhong1": [31, 13, 20, 19, 12, 1, 0, 0], "zhong4": [31, 13, 20, 19, 12, 4, 0, 0], "feng1": [11, 10, 19, 12, 1, 0, 0, 0], "chuan4": [8, 13, 26, 6, 19, 4, 0, 0], "guan4": [12, 26, 6, 19, 4, 0, 0, 0], "quan4": [22, 26, 6, 19, 4, 0, 0, 0], "lin2": [17, 14, 19, 2, 0, 0, 0, 0], "zhu3": [31, 13, 26, 3, 0, 0, 0, 0], "wan2": [28, 6, 19, 2, 0, 0, 0, 0], "dan1": [9, 6, 19, 1, 0, 0, 0, 0], "wei4": [28, 10, 14, 4, 0, 0, 0, 0], "wei2": [28, 10, 14, 2, 0, 0, 0, 0], "zhu4": [31, 13, 26, 4, 0, 0, 0, 0], "jing3": [15, 14, 19, 12, 3, 0, 0, 0], "dan3": [9, 6, 19, 3, 0, 0, 0, 0], "li4": [17, 14, 4, 0, 0, 0, 0, 0], "li2": [17, 14, 2, 0, 0, 0, 0, 0], "ju3": [15, 26, 3, 0, 0, 0, 0, 0], "pie3": [21, 14, 10, 3, 0, 0, 0, 0], "ai4": [6, 14, 4, 0, 0, 0, 0, 0], "nai3": [19, 6, 14, 3, 0, 0, 0, 0], "ai3": [6, 14, 3, 0, 0, 0, 0, 0], "jiu3": [15, 14, 26, 3, 0, 0, 0, 0], "me": [18, 10, 5, 0, 0, 0, 0, 0], "yao1": [30, 6, 20, 1, 0, 0, 0, 0], "mo2": [18, 20, 2, 0, 0, 0, 0, 0], "ma": [18, 6, 5, 0, 0, 0, 0, 0], "zhi1": [31, 13, 14, 1, 0, 0, 0, 0], "zhu1": [31, 13, 26, 1, 0, 0, 0, 0], "zhi4": [31, 13, 14, 4, 0, 0, 0, 0], "wu1": [28, 26, 1, 0, 0, 0, 0, 0], "wu4": [28, 26, 4, 0, 0, 0, 0, 0], "zha4": [31, 13, 6, 4, 0, 0, 0, 0], "zuo4": [31, 26, 20, 4, 0, 0, 0, 0], "hu1": [13, 26, 1, 0, 0, 0, 0, 0], "fa2": [11, 6, 2, 0, 0, 0, 0, 0], "le4": [17, 10, 4, 0, 0, 0, 0, 0], "yue4": [30, 26, 10, 4, 0, 0, 0, 0], "ping1": [21, 14, 19, 12, 1, 0, 0, 0], "pang1": [21, 6, 19, 12, 1, 0, 0, 0], "qiao2": [22, 14, 6, 20, 2, 0, 0, 0], "guai1": [12, 26, 6, 14, 1, 0, 0, 0], "yi3": [30, 14, 3, 0, 0, 0, 0, 0], "jue2": [15, 26, 10, 2, 0, 0, 0, 0], "mie1": [18, 14, 10, 1, 0, 0, 0, 0], "nie4": [19, 14, 10, 4, 0, 0, 0, 0], "jiu1": [15, 14, 26, 1, 0, 0, 0, 0], "qi3": [22, 14, 3, 0, 0, 0, 0, 0], "qi4": [22, 14, 4, 0, 0, 0, 0, 0], "ye3": [30, 10, 3, 0, 0, 0, 0, 0], "xi2": [29, 14, 2, 0, 0, 0, 0, 0], "xiang1": [29, 14, 6, 19, 12, 1, 0, 0], "shu1": [24, 13, 26, 1, 0, 0, 0, 0], "ji1": [15, 14, 1, 0, 0, 0, 0, 
0], "mai3": [18, 6, 14, 3, 0, 0, 0, 0], "luan4": [17, 26, 6, 19, 4, 0, 0, 0], "ru3": [23, 26, 3, 0, 0, 0, 0, 0], "qian2": [22, 14, 6, 19, 2, 0, 0, 0], "gan1": [12, 6, 19, 1, 0, 0, 0, 0], "gui1": [12, 26, 14, 1, 0, 0, 0, 0], "le": [17, 10, 5, 0, 0, 0, 0, 0], "liao3": [17, 14, 6, 20, 3, 0, 0, 0], "liao4": [17, 14, 6, 20, 4, 0, 0, 0], "zi4": [31, 14, 4, 0, 0, 0, 0, 0], "er4": [10, 23, 4, 0, 0, 0, 0, 0], "yu1": [30, 26, 1, 0, 0, 0, 0, 0], "xu1": [29, 26, 1, 0, 0, 0, 0, 0], "kui1": [16, 26, 14, 1, 0, 0, 0, 0], "yun2": [30, 26, 19, 2, 0, 0, 0, 0], "hu4": [13, 26, 4, 0, 0, 0, 0, 0], "wu3": [28, 26, 3, 0, 0, 0, 0, 0], "jing4": [15, 14, 19, 12, 4, 0, 0, 0], "gen4": [12, 10, 19, 4, 0, 0, 0, 0], "xuan1": [29, 26, 6, 19, 1, 0, 0, 0], "geng4": [12, 10, 19, 12, 4, 0, 0, 0], "ya4": [30, 6, 4, 0, 0, 0, 0, 0], "xie1": [29, 14, 10, 1, 0, 0, 0, 0], "suo4": [24, 26, 20, 4, 0, 0, 0, 0], "suo1": [24, 26, 20, 1, 0, 0, 0, 0], "e4": [10, 4, 0, 0, 0, 0, 0, 0], "ji2": [15, 14, 2, 0, 0, 0, 0, 0], "wang2": [28, 6, 19, 12, 2, 0, 0, 0], "wu2": [28, 26, 2, 0, 0, 0, 0, 0], "kang4": [16, 6, 19, 12, 4, 0, 0, 0], "gang1": [12, 6, 19, 12, 1, 0, 0, 0], "geng1": [12, 10, 19, 12, 1, 0, 0, 0], "jiao1": [15, 14, 6, 20, 1, 0, 0, 0], "hai4": [13, 6, 14, 4, 0, 0, 0, 0], "jie1": [15, 14, 10, 1, 0, 0, 0, 0], "chan3": [8, 13, 6, 19, 3, 0, 0, 0], "heng1": [13, 10, 19, 12, 1, 0, 0, 0], "xiang3": [29, 14, 6, 19, 12, 3, 0, 0], "peng1": [21, 10, 19, 12, 1, 0, 0, 0], "mu3": [18, 26, 3, 0, 0, 0, 0, 0], "jing1": [15, 14, 19, 12, 1, 0, 0, 0], "ting2": [25, 14, 19, 12, 2, 0, 0, 0], "liang4": [17, 14, 6, 19, 12, 4, 0, 0], "liang2": [17, 14, 6, 19, 12, 2, 0, 0], "qin1": [22, 14, 19, 1, 0, 0, 0, 0], "qing4": [22, 14, 19, 12, 4, 0, 0, 0], "bo2": [7, 20, 2, 0, 0, 0, 0, 0], "xie4": [29, 14, 10, 4, 0, 0, 0, 0], "ren2": [23, 10, 19, 2, 0, 0, 0, 0], "shen2": [24, 13, 10, 19, 2, 0, 0, 0], "shi2": [24, 13, 14, 2, 0, 0, 0, 0], "ding3": [9, 14, 19, 12, 3, 0, 0, 0], "ze4": [31, 10, 4, 0, 0, 0, 0, 0], "jin3": [15, 14, 19, 3, 0, 0, 0, 
0], "fu4": [11, 26, 4, 0, 0, 0, 0, 0], "nu2": [19, 26, 2, 0, 0, 0, 0, 0], "jin4": [15, 14, 19, 4, 0, 0, 0, 0], "pu1": [21, 26, 1, 0, 0, 0, 0, 0], "pu2": [21, 26, 2, 0, 0, 0, 0, 0], "chou2": [8, 13, 20, 26, 2, 0, 0, 0], "qiu2": [22, 14, 26, 2, 0, 0, 0, 0], "jin1": [15, 14, 19, 1, 0, 0, 0, 0], "jie4": [15, 14, 10, 4, 0, 0, 0, 0], "reng2": [23, 10, 19, 12, 2, 0, 0, 0], "cong1": [8, 20, 19, 12, 1, 0, 0, 0], "fo2": [11, 20, 2, 0, 0, 0, 0, 0], "lun2": [17, 26, 19, 2, 0, 0, 0, 0], "cang1": [8, 6, 19, 12, 1, 0, 0, 0], "zai3": [31, 6, 14, 3, 0, 0, 0, 0], "zi3": [31, 14, 3, 0, 0, 0, 0, 0], "zi1": [31, 14, 1, 0, 0, 0, 0, 0], "ta1": [25, 6, 1, 0, 0, 0, 0, 0], "tuo2": [25, 26, 20, 2, 0, 0, 0, 0], "xian1": [29, 14, 6, 19, 1, 0, 0, 0], "xian3": [29, 14, 6, 19, 3, 0, 0, 0], "tong2": [25, 20, 19, 12, 2, 0, 0, 0], "ren4": [23, 10, 19, 4, 0, 0, 0, 0], "qian1": [22, 14, 6, 19, 1, 0, 0, 0], "dai4": [9, 6, 14, 4, 0, 0, 0, 0], "ling4": [17, 14, 19, 12, 4, 0, 0, 0], "lian2": [17, 14, 6, 19, 2, 0, 0, 0], "ling3": [17, 14, 19, 12, 3, 0, 0, 0], "si4": [24, 14, 4, 0, 0, 0, 0, 0], "sa1": [24, 6, 1, 0, 0, 0, 0, 0], "men": [18, 10, 19, 5, 0, 0, 0, 0], "fan3": [11, 6, 19, 3, 0, 0, 0, 0], "yang3": [30, 6, 19, 12, 3, 0, 0, 0], "ang2": [6, 19, 12, 2, 0, 0, 0, 0], "jian4": [15, 14, 6, 19, 4, 0, 0, 0], "mou2": [18, 20, 26, 2, 0, 0, 0, 0], "jia4": [15, 14, 6, 4, 0, 0, 0, 0], "jie": [15, 14, 10, 5, 0, 0, 0, 0], "lin4": [17, 14, 19, 4, 0, 0, 0, 0], "fen4": [11, 10, 19, 4, 0, 0, 0, 0], "bin1": [7, 14, 19, 1, 0, 0, 0, 0], "fang3": [11, 6, 19, 12, 3, 0, 0, 0], "pang2": [21, 6, 19, 12, 2, 0, 0, 0], "kang3": [16, 6, 19, 12, 3, 0, 0, 0], "ji4": [15, 14, 4, 0, 0, 0, 0, 0], "fu2": [11, 26, 2, 0, 0, 0, 0, 0], "xiu1": [29, 14, 26, 1, 0, 0, 0, 0], "xu4": [29, 26, 4, 0, 0, 0, 0, 0], "yin2": [30, 14, 19, 2, 0, 0, 0, 0], "you1": [30, 20, 26, 1, 0, 0, 0, 0], "you2": [30, 20, 26, 2, 0, 0, 0, 0], "huo3": [13, 26, 20, 3, 0, 0, 0, 0], "huo": [13, 26, 20, 5, 0, 0, 0, 0], "hui4": [13, 26, 14, 4, 0, 0, 0, 0], "kuai4": [16, 
26, 6, 14, 4, 0, 0, 0], "san3": [24, 6, 19, 3, 0, 0, 0, 0], "wei3": [28, 10, 14, 3, 0, 0, 0, 0], "chuan2": [8, 13, 26, 6, 19, 2, 0, 0], "zhuan4": [31, 13, 26, 6, 19, 4, 0, 0], "ya2": [30, 6, 2, 0, 0, 0, 0, 0], "shang1": [24, 13, 6, 19, 12, 1, 0, 0], "ba4": [7, 6, 4, 0, 0, 0, 0, 0], "bai3": [7, 6, 14, 3, 0, 0, 0, 0], "gu1": [12, 26, 1, 0, 0, 0, 0, 0], "gu4": [12, 26, 4, 0, 0, 0, 0, 0], "pan4": [21, 6, 19, 4, 0, 0, 0, 0], "shen1": [24, 13, 10, 19, 1, 0, 0, 0], "ci4": [8, 14, 4, 0, 0, 0, 0, 0], "ga1": [12, 6, 1, 0, 0, 0, 0, 0], "jia1": [15, 14, 6, 1, 0, 0, 0, 0], "qie2": [22, 14, 10, 2, 0, 0, 0, 0], "dian4": [9, 14, 6, 19, 4, 0, 0, 0], "tian2": [25, 14, 6, 19, 2, 0, 0, 0], "dan4": [9, 6, 19, 4, 0, 0, 0, 0], "tan3": [25, 6, 19, 3, 0, 0, 0, 0], "yan4": [30, 6, 19, 4, 0, 0, 0, 0], "di1": [9, 14, 1, 0, 0, 0, 0, 0], "zuo3": [31, 26, 20, 3, 0, 0, 0, 0], "you4": [30, 20, 26, 4, 0, 0, 0, 0], "ti3": [25, 14, 3, 0, 0, 0, 0, 0], "ben4": [7, 10, 19, 4, 0, 0, 0, 0], "cui4": [8, 26, 14, 4, 0, 0, 0, 0], "ti1": [25, 14, 1, 0, 0, 0, 0, 0], "zhan4": [31, 13, 6, 19, 4, 0, 0, 0], "chan1": [8, 13, 6, 19, 1, 0, 0, 0], "dian1": [9, 14, 6, 19, 1, 0, 0, 0], "he2": [13, 10, 2, 0, 0, 0, 0, 0], "he4": [13, 10, 4, 0, 0, 0, 0, 0], "tuo1": [25, 26, 20, 1, 0, 0, 0, 0], "tuo4": [25, 26, 20, 4, 0, 0, 0, 0], "she2": [24, 13, 10, 2, 0, 0, 0, 0], "tu2": [25, 26, 2, 0, 0, 0, 0, 0], "xu2": [29, 26, 2, 0, 0, 0, 0, 0], "die2": [9, 14, 10, 2, 0, 0, 0, 0], "bi4": [7, 14, 4, 0, 0, 0, 0, 0], "zuo1": [31, 26, 20, 1, 0, 0, 0, 0], "zuo2": [31, 26, 20, 2, 0, 0, 0, 0], "gou1": [12, 20, 26, 1, 0, 0, 0, 0], "kou4": [16, 20, 26, 4, 0, 0, 0, 0], "ning4": [19, 14, 19, 12, 4, 0, 0, 0], "ni3": [19, 14, 3, 0, 0, 0, 0, 0], "qu2": [22, 26, 2, 0, 0, 0, 0, 0], "yong1": [30, 20, 19, 12, 1, 0, 0, 0], "yong4": [30, 20, 19, 12, 4, 0, 0, 0], "wa3": [28, 6, 3, 0, 0, 0, 0, 0], "pei4": [21, 10, 14, 4, 0, 0, 0, 0], "lao3": [17, 6, 20, 3, 0, 0, 0, 0], "liao2": [17, 14, 6, 20, 2, 0, 0, 0], "yang2": [30, 6, 19, 12, 2, 0, 0, 0], "tiao1": 
[25, 14, 6, 20, 1, 0, 0, 0], "tiao2": [25, 14, 6, 20, 2, 0, 0, 0], "tiao4": [25, 14, 6, 20, 4, 0, 0, 0], "diao3": [9, 14, 6, 20, 3, 0, 0, 0], "yao2": [30, 6, 20, 2, 0, 0, 0, 0], "dao4": [9, 6, 20, 4, 0, 0, 0, 0], "zhao4": [31, 13, 6, 20, 4, 0, 0, 0], "jiao3": [15, 14, 6, 20, 3, 0, 0, 0], "xiao2": [29, 14, 6, 20, 2, 0, 0, 0], "shi3": [24, 13, 14, 3, 0, 0, 0, 0], "kan3": [16, 6, 19, 3, 0, 0, 0, 0], "zhi2": [31, 13, 14, 2, 0, 0, 0, 0], "lai2": [17, 6, 14, 2, 0, 0, 0, 0], "lai4": [17, 6, 14, 4, 0, 0, 0, 0], "chi3": [8, 13, 14, 3, 0, 0, 0, 0], "lie4": [17, 14, 10, 4, 0, 0, 0, 0], "zhou1": [31, 13, 20, 26, 1, 0, 0, 0], "lun4": [17, 26, 19, 4, 0, 0, 0, 0], "dong4": [9, 20, 19, 12, 4, 0, 0, 0], "tong1": [25, 20, 19, 12, 1, 0, 0, 0], "tong3": [25, 20, 19, 12, 3, 0, 0, 0], "gong1": [12, 20, 19, 12, 1, 0, 0, 0], "gong4": [12, 20, 19, 12, 4, 0, 0, 0], "xia2": [29, 14, 6, 2, 0, 0, 0, 0], "lv3": [17, 27, 3, 0, 0, 0, 0, 0], "zhen1": [31, 13, 10, 19, 1, 0, 0, 0], "ce4": [8, 10, 4, 0, 0, 0, 0, 0], "zhai1": [31, 13, 6, 14, 1, 0, 0, 0], "nong2": [19, 20, 19, 12, 2, 0, 0, 0], "hou2": [13, 20, 26, 2, 0, 0, 0, 0], "hou4": [13, 20, 26, 4, 0, 0, 0, 0], "qin3": [22, 14, 19, 3, 0, 0, 0, 0], "ju2": [15, 26, 2, 0, 0, 0, 0, 0], "bian4": [7, 14, 6, 19, 4, 0, 0, 0], "pian2": [21, 14, 6, 19, 2, 0, 0, 0], "bian1": [7, 14, 6, 19, 1, 0, 0, 0], "xi4": [29, 14, 4, 0, 0, 0, 0, 0], "cu4": [8, 26, 4, 0, 0, 0, 0, 0], "chuo4": [8, 13, 26, 20, 4, 0, 0, 0], "e2": [10, 2, 0, 0, 0, 0, 0, 0], "jun4": [15, 26, 19, 4, 0, 0, 0, 0], "shun4": [24, 13, 26, 19, 4, 0, 0, 0], "dun1": [9, 26, 19, 1, 0, 0, 0, 0], "zu3": [31, 26, 3, 0, 0, 0, 0, 0], "qiao4": [22, 14, 6, 20, 4, 0, 0, 0], "xiao4": [29, 14, 6, 20, 4, 0, 0, 0], "xiao1": [29, 14, 6, 20, 1, 0, 0, 0], "yong3": [30, 20, 19, 12, 3, 0, 0, 0], "su2": [24, 26, 2, 0, 0, 0, 0, 0], "li3": [17, 14, 3, 0, 0, 0, 0, 0], "bao3": [7, 6, 20, 3, 0, 0, 0, 0], "shu4": [24, 13, 26, 4, 0, 0, 0, 0], "xin4": [29, 14, 19, 4, 0, 0, 0, 0], "yan3": [30, 6, 19, 3, 0, 0, 0, 0], "lia3": [17, 
14, 6, 3, 0, 0, 0, 0], "jian3": [15, 14, 6, 19, 3, 0, 0, 0], "fu3": [11, 26, 3, 0, 0, 0, 0, 0], "ju4": [15, 26, 4, 0, 0, 0, 0, 0], "pai2": [21, 6, 14, 2, 0, 0, 0, 0], "feng4": [11, 10, 19, 12, 4, 0, 0, 0], "beng3": [7, 10, 19, 12, 3, 0, 0, 0], "an3": [6, 19, 3, 0, 0, 0, 0, 0], "bi3": [7, 14, 3, 0, 0, 0, 0, 0], "bei1": [7, 10, 14, 1, 0, 0, 0, 0], "pi4": [21, 14, 4, 0, 0, 0, 0, 0], "chuang4": [8, 13, 26, 6, 19, 12, 4, 0], "guan1": [12, 26, 6, 19, 1, 0, 0, 0], "bei4": [7, 10, 14, 4, 0, 0, 0, 0], "pei2": [21, 10, 14, 2, 0, 0, 0, 0], "men4": [18, 10, 19, 4, 0, 0, 0, 0], "men2": [18, 10, 19, 2, 0, 0, 0, 0], "dao3": [9, 6, 20, 3, 0, 0, 0, 0], "jue4": [15, 26, 10, 4, 0, 0, 0, 0], "xing4": [29, 14, 19, 12, 4, 0, 0, 0], "tang3": [25, 6, 19, 12, 3, 0, 0, 0], "chang2": [8, 13, 6, 19, 12, 2, 0, 0], "ti4": [25, 14, 4, 0, 0, 0, 0, 0], "diao4": [9, 14, 6, 20, 4, 0, 0, 0], "chang4": [8, 13, 6, 19, 12, 4, 0, 0], "chang1": [8, 13, 6, 19, 12, 1, 0, 0], "juan4": [15, 26, 6, 19, 4, 0, 0, 0], "qian4": [22, 14, 6, 19, 4, 0, 0, 0], "ni2": [19, 14, 2, 0, 0, 0, 0, 0], "ni4": [19, 14, 4, 0, 0, 0, 0, 0], "zhuo1": [31, 13, 26, 20, 1, 0, 0, 0], "wo1": [28, 20, 1, 0, 0, 0, 0, 0], "wei1": [28, 10, 14, 1, 0, 0, 0, 0], "wo3": [28, 20, 3, 0, 0, 0, 0, 0], "zhai4": [31, 13, 6, 14, 4, 0, 0, 0], "qing1": [22, 14, 19, 12, 1, 0, 0, 0], "jia3": [15, 14, 6, 3, 0, 0, 0, 0], "ge2": [12, 10, 2, 0, 0, 0, 0, 0], "jie2": [15, 14, 10, 2, 0, 0, 0, 0], "ruo4": [23, 26, 20, 4, 0, 0, 0, 0], "re4": [23, 10, 4, 0, 0, 0, 0, 0], "pian1": [21, 14, 6, 19, 1, 0, 0, 0], "xie2": [29, 14, 10, 2, 0, 0, 0, 0], "ou3": [20, 26, 3, 0, 0, 0, 0, 0], "tou1": [25, 20, 26, 1, 0, 0, 0, 0], "lou2": [17, 20, 26, 2, 0, 0, 0, 0], "gui4": [12, 26, 14, 4, 0, 0, 0, 0], "kui3": [16, 26, 14, 3, 0, 0, 0, 0], "beng1": [7, 10, 19, 12, 1, 0, 0, 0], "peng2": [21, 10, 19, 12, 2, 0, 0, 0], "xiang4": [29, 14, 6, 19, 12, 4, 0, 0], "dai3": [9, 6, 14, 3, 0, 0, 0, 0], "chu3": [8, 13, 26, 3, 0, 0, 0, 0], "nuo2": [19, 26, 20, 2, 0, 0, 0, 0], "cui1": [8, 26, 14, 
1, 0, 0, 0, 0], "chong1": [8, 13, 20, 19, 12, 1, 0, 0], "ao4": [6, 20, 4, 0, 0, 0, 0, 0], "ao2": [6, 20, 2, 0, 0, 0, 0, 0], "sha3": [24, 13, 6, 3, 0, 0, 0, 0], "qing3": [22, 14, 19, 12, 3, 0, 0, 0], "xi1": [29, 14, 1, 0, 0, 0, 0, 0], "seng1": [24, 10, 19, 12, 1, 0, 0, 0], "ceng2": [8, 10, 19, 12, 2, 0, 0, 0], "zen4": [31, 10, 19, 4, 0, 0, 0, 0], "zhuang4": [31, 13, 26, 6, 19, 12, 4, 0], "chong4": [8, 13, 20, 19, 12, 4, 0, 0], "jiang1": [15, 14, 6, 19, 12, 1, 0, 0], "qia3": [22, 14, 6, 3, 0, 0, 0, 0], "shan4": [24, 13, 6, 19, 4, 0, 0, 0], "ru2": [23, 26, 2, 0, 0, 0, 0, 0], "chai2": [8, 13, 6, 14, 2, 0, 0, 0], "lei3": [17, 10, 14, 3, 0, 0, 0, 0], "lei2": [17, 10, 14, 2, 0, 0, 0, 0], "lei4": [17, 10, 14, 4, 0, 0, 0, 0], "chu2": [8, 13, 26, 2, 0, 0, 0, 0], "er2": [10, 23, 2, 0, 0, 0, 0, 0], "yun3": [30, 26, 19, 3, 0, 0, 0, 0], "xiong1": [29, 14, 20, 19, 12, 1, 0, 0], "kuang4": [16, 26, 6, 19, 12, 4, 0, 0], "guang1": [12, 26, 6, 19, 12, 1, 0, 0], "guang4": [12, 26, 6, 19, 12, 4, 0, 0], "ke4": [16, 10, 4, 0, 0, 0, 0, 0], "dui4": [9, 26, 14, 4, 0, 0, 0, 0], "mian3": [18, 14, 6, 19, 3, 0, 0, 0], "wen4": [28, 10, 19, 4, 0, 0, 0, 0], "wan3": [28, 6, 19, 3, 0, 0, 0, 0], "rui4": [23, 26, 14, 4, 0, 0, 0, 0], "duo2": [9, 26, 20, 2, 0, 0, 0, 0], "tu4": [25, 26, 4, 0, 0, 0, 0, 0], "dang3": [9, 6, 19, 12, 3, 0, 0, 0], "dou1": [9, 20, 26, 1, 0, 0, 0, 0], "ru4": [23, 26, 4, 0, 0, 0, 0, 0], "nei4": [19, 10, 14, 4, 0, 0, 0, 0], "quan2": [22, 26, 6, 19, 2, 0, 0, 0], "ba1": [7, 6, 1, 0, 0, 0, 0, 0], "ba2": [7, 6, 2, 0, 0, 0, 0, 0], "liu4": [17, 14, 26, 4, 0, 0, 0, 0], "lu4": [17, 26, 4, 0, 0, 0, 0, 0], "lan2": [17, 6, 19, 2, 0, 0, 0, 0], "gong3": [12, 20, 19, 12, 3, 0, 0, 0], "hong2": [13, 20, 19, 12, 2, 0, 0, 0], "tian1": [25, 14, 6, 19, 1, 0, 0, 0], "bing1": [7, 14, 19, 12, 1, 0, 0, 0], "dian3": [9, 14, 6, 19, 3, 0, 0, 0], "tian3": [25, 14, 6, 19, 3, 0, 0, 0], "ci2": [8, 14, 2, 0, 0, 0, 0, 0], "jian1": [15, 14, 6, 19, 1, 0, 0, 0], "shou4": [24, 13, 20, 26, 4, 0, 0, 0], "na4": [19, 6, 
4, 0, 0, 0, 0, 0], "mao3": [18, 6, 20, 3, 0, 0, 0, 0], "ran3": [23, 6, 19, 3, 0, 0, 0, 0], "nan2": [19, 6, 19, 2, 0, 0, 0, 0], "zai4": [31, 6, 14, 4, 0, 0, 0, 0], "jiong3": [15, 14, 20, 19, 12, 3, 0, 0], "jiong1": [15, 14, 20, 19, 12, 1, 0, 0], "mao4": [18, 6, 20, 4, 0, 0, 0, 0], "rong3": [23, 20, 19, 12, 3, 0, 0, 0], "xie3": [29, 14, 10, 3, 0, 0, 0, 0], "jun1": [15, 26, 19, 1, 0, 0, 0, 0], "zhong3": [31, 13, 20, 19, 12, 3, 0, 0], "yuan1": [30, 26, 6, 19, 1, 0, 0, 0], "ming2": [18, 14, 19, 12, 2, 0, 0, 0], "mian2": [18, 14, 6, 19, 2, 0, 0, 0], "mian4": [18, 14, 6, 19, 4, 0, 0, 0], "mi4": [18, 14, 4, 0, 0, 0, 0, 0], "feng2": [11, 10, 19, 12, 2, 0, 0, 0], "ping2": [21, 14, 19, 12, 2, 0, 0, 0], "ning2": [19, 14, 19, 12, 2, 0, 0, 0], "leng3": [17, 10, 19, 12, 3, 0, 0, 0], "sheng3": [24, 13, 10, 19, 12, 3, 0, 0], "qia4": [22, 14, 6, 4, 0, 0, 0, 0], "cheng1": [8, 13, 10, 19, 12, 1, 0, 0], "zhun3": [31, 13, 26, 19, 3, 0, 0, 0], "song1": [24, 20, 19, 12, 1, 0, 0, 0], "diao1": [9, 14, 6, 20, 1, 0, 0, 0], "cou4": [8, 20, 26, 4, 0, 0, 0, 0], "lin3": [17, 14, 19, 3, 0, 0, 0, 0], "ji3": [15, 14, 3, 0, 0, 0, 0, 0], "fan2": [11, 6, 19, 2, 0, 0, 0, 0], "chu4": [8, 13, 26, 4, 0, 0, 0, 0], "zhi3": [31, 13, 14, 3, 0, 0, 0, 0], "kai3": [16, 6, 14, 3, 0, 0, 0, 0], "huang2": [13, 26, 6, 19, 12, 2, 0, 0], "deng4": [9, 10, 19, 12, 4, 0, 0, 0], "tu1": [25, 26, 1, 0, 0, 0, 0, 0], "ao1": [6, 20, 1, 0, 0, 0, 0, 0], "wa1": [28, 6, 1, 0, 0, 0, 0, 0], "chu1": [8, 13, 26, 1, 0, 0, 0, 0], "han2": [13, 6, 19, 2, 0, 0, 0, 0], "zao2": [31, 6, 20, 2, 0, 0, 0, 0], "dao1": [9, 6, 20, 1, 0, 0, 0, 0], "fen1": [11, 10, 19, 1, 0, 0, 0, 0], "fen2": [11, 10, 19, 2, 0, 0, 0, 0], "qie4": [22, 14, 10, 4, 0, 0, 0, 0], "qie1": [22, 14, 10, 1, 0, 0, 0, 0], "kan1": [16, 6, 19, 1, 0, 0, 0, 0], "wen3": [28, 10, 19, 3, 0, 0, 0, 0], "xing2": [29, 14, 19, 12, 2, 0, 0, 0], "hua4": [13, 26, 6, 4, 0, 0, 0, 0], "guo4": [12, 26, 20, 4, 0, 0, 0, 0], "guo3": [12, 26, 20, 3, 0, 0, 0, 0], "hua2": [13, 26, 6, 2, 0, 0, 0, 0], 
"huai": [13, 26, 6, 14, 5, 0, 0, 0], "liu2": [17, 14, 26, 2, 0, 0, 0, 0], "ze2": [31, 10, 2, 0, 0, 0, 0, 0], "chuang1": [8, 13, 26, 6, 19, 12, 1, 0], "shan1": [24, 13, 6, 19, 1, 0, 0, 0], "bie2": [7, 14, 10, 2, 0, 0, 0, 0], "pao2": [21, 6, 20, 2, 0, 0, 0, 0], "bao4": [7, 6, 20, 4, 0, 0, 0, 0], "bie4": [7, 14, 10, 4, 0, 0, 0, 0], "gua1": [12, 26, 6, 1, 0, 0, 0, 0], "shua1": [24, 13, 26, 6, 1, 0, 0, 0], "shua4": [24, 13, 26, 6, 4, 0, 0, 0], "xuan4": [29, 26, 6, 19, 4, 0, 0, 0], "sha1": [24, 13, 6, 1, 0, 0, 0, 0], "cha4": [8, 13, 6, 4, 0, 0, 0, 0], "ci1": [8, 14, 1, 0, 0, 0, 0, 0], "kei1": [16, 10, 14, 1, 0, 0, 0, 0], "duo4": [9, 26, 20, 4, 0, 0, 0, 0], "cuo4": [8, 26, 20, 4, 0, 0, 0, 0], "xue1": [29, 26, 10, 1, 0, 0, 0, 0], "shao4": [24, 13, 6, 20, 4, 0, 0, 0], "la2": [17, 6, 2, 0, 0, 0, 0, 0], "la4": [17, 6, 4, 0, 0, 0, 0, 0], "gua3": [12, 26, 6, 3, 0, 0, 0, 0], "pou1": [21, 20, 26, 1, 0, 0, 0, 0], "po3": [21, 20, 3, 0, 0, 0, 0, 0], "wan1": [28, 6, 19, 1, 0, 0, 0, 0], "bo1": [7, 20, 1, 0, 0, 0, 0, 0], "bao1": [7, 6, 20, 1, 0, 0, 0, 0], "ge1": [12, 10, 1, 0, 0, 0, 0, 0], "qiang1": [22, 14, 6, 19, 12, 1, 0, 0], "piao1": [21, 14, 6, 20, 1, 0, 0, 0], "piao4": [21, 14, 6, 20, 4, 0, 0, 0], "piao2": [21, 14, 6, 20, 2, 0, 0, 0], "biao3": [7, 14, 6, 20, 3, 0, 0, 0], "biao1": [7, 14, 6, 20, 1, 0, 0, 0], "chao1": [8, 13, 6, 20, 1, 0, 0, 0], "pi3": [21, 14, 3, 0, 0, 0, 0, 0], "tang1": [25, 6, 19, 12, 1, 0, 0, 0], "nu3": [19, 26, 3, 0, 0, 0, 0, 0], "lao2": [17, 6, 20, 2, 0, 0, 0, 0], "kai4": [16, 6, 14, 4, 0, 0, 0, 0], "xun1": [29, 26, 19, 1, 0, 0, 0, 0], "meng3": [18, 10, 19, 12, 3, 0, 0, 0], "lei1": [17, 10, 14, 1, 0, 0, 0, 0], "lei": [17, 10, 14, 5, 0, 0, 0, 0], "mao2": [18, 6, 20, 2, 0, 0, 0, 0], "lao4": [17, 6, 20, 4, 0, 0, 0, 0], "mu4": [18, 26, 4, 0, 0, 0, 0, 0], "qin2": [22, 14, 19, 2, 0, 0, 0, 0], "shao2": [24, 13, 6, 20, 2, 0, 0, 0], "shuo4": [24, 13, 26, 20, 4, 0, 0, 0], "zhuo2": [31, 13, 26, 20, 2, 0, 0, 0], "di4": [9, 14, 4, 0, 0, 0, 0, 0], "gou4": [12, 20, 26, 4, 
0, 0, 0, 0], "yun4": [30, 26, 19, 4, 0, 0, 0, 0], "pin4": [21, 14, 19, 4, 0, 0, 0, 0], "hua1": [13, 26, 6, 1, 0, 0, 0, 0], "huo4": [13, 26, 20, 4, 0, 0, 0, 0], "bei3": [7, 10, 14, 3, 0, 0, 0, 0], "shi": [24, 13, 14, 5, 0, 0, 0, 0], "chi2": [8, 13, 14, 2, 0, 0, 0, 0], "za1": [31, 6, 1, 0, 0, 0, 0, 0], "jiang4": [15, 14, 6, 19, 12, 4, 0, 0], "kuang1": [16, 26, 6, 19, 12, 1, 0, 0], "wang1": [28, 6, 19, 12, 1, 0, 0, 0], "fei3": [11, 10, 14, 3, 0, 0, 0, 0], "fei1": [11, 10, 14, 1, 0, 0, 0, 0], "kui4": [16, 26, 14, 4, 0, 0, 0, 0], "qu1": [22, 26, 1, 0, 0, 0, 0, 0], "ou1": [20, 26, 1, 0, 0, 0, 0, 0], "bian3": [7, 14, 6, 19, 3, 0, 0, 0], "te4": [25, 10, 4, 0, 0, 0, 0, 0], "sa4": [24, 6, 4, 0, 0, 0, 0, 0], "sheng1": [24, 13, 10, 19, 12, 1, 0, 0], "pi2": [21, 14, 2, 0, 0, 0, 0, 0], "ban1": [7, 6, 19, 1, 0, 0, 0, 0], "zu2": [31, 26, 2, 0, 0, 0, 0, 0], "chan2": [8, 13, 6, 19, 2, 0, 0, 0], "mai4": [18, 6, 14, 4, 0, 0, 0, 0], "na1": [19, 6, 1, 0, 0, 0, 0, 0], "bo": [7, 20, 5, 0, 0, 0, 0, 0], "bu3": [7, 26, 3, 0, 0, 0, 0, 0], "pan2": [21, 6, 19, 2, 0, 0, 0, 0], "zhan1": [31, 13, 6, 19, 1, 0, 0, 0], "tie1": [25, 14, 10, 1, 0, 0, 0, 0], "ka3": [16, 6, 3, 0, 0, 0, 0, 0], "lu2": [17, 26, 2, 0, 0, 0, 0, 0], "lu3": [17, 26, 3, 0, 0, 0, 0, 0], "gua4": [12, 26, 6, 4, 0, 0, 0, 0], "wo4": [28, 20, 4, 0, 0, 0, 0, 0], "yin4": [30, 14, 19, 4, 0, 0, 0, 0], "que4": [22, 26, 10, 4, 0, 0, 0, 0], "luan3": [17, 26, 6, 19, 3, 0, 0, 0], "kun1": [16, 26, 19, 1, 0, 0, 0, 0], "juan3": [15, 26, 6, 19, 3, 0, 0, 0], "quan1": [22, 26, 6, 19, 1, 0, 0, 0], "chang3": [8, 13, 6, 19, 12, 3, 0, 0], "han3": [13, 6, 19, 3, 0, 0, 0, 0], "an1": [6, 19, 1, 0, 0, 0, 0, 0], "e3": [10, 3, 0, 0, 0, 0, 0, 0], "ting1": [25, 14, 19, 12, 1, 0, 0, 0], "si": [24, 14, 5, 0, 0, 0, 0, 0], "sha4": [24, 13, 6, 4, 0, 0, 0, 0], "jiu4": [15, 14, 26, 4, 0, 0, 0, 0], "yan1": [30, 6, 19, 1, 0, 0, 0, 0], "qu4": [22, 26, 4, 0, 0, 0, 0, 0], "xian4": [29, 14, 6, 19, 4, 0, 0, 0], "can1": [8, 6, 19, 1, 0, 0, 0, 0], "cen1": [8, 10, 19, 1, 0, 0, 
0, 0], "can4": [8, 6, 19, 4, 0, 0, 0, 0], "cha1": [8, 13, 6, 1, 0, 0, 0, 0], "cha2": [8, 13, 6, 2, 0, 0, 0, 0], "cha3": [8, 13, 6, 3, 0, 0, 0, 0], "you3": [30, 20, 26, 3, 0, 0, 0, 0], "shuang1": [24, 13, 26, 6, 19, 12, 1, 0], "fan4": [11, 6, 19, 4, 0, 0, 0, 0], "shou1": [24, 13, 20, 26, 1, 0, 0, 0], "fa1": [11, 6, 1, 0, 0, 0, 0, 0], "fa4": [11, 6, 4, 0, 0, 0, 0, 0], "qu3": [22, 26, 3, 0, 0, 0, 0, 0], "sou3": [24, 20, 26, 3, 0, 0, 0, 0], "sou1": [24, 20, 26, 1, 0, 0, 0, 0], "kou3": [16, 20, 26, 3, 0, 0, 0, 0], "gu3": [12, 26, 3, 0, 0, 0, 0, 0], "ku1": [16, 26, 1, 0, 0, 0, 0, 0], "tao1": [25, 6, 20, 1, 0, 0, 0, 0], "dao2": [9, 6, 20, 2, 0, 0, 0, 0], "jiao4": [15, 14, 6, 20, 4, 0, 0, 0], "pa1": [21, 6, 1, 0, 0, 0, 0, 0], "ba": [7, 6, 5, 0, 0, 0, 0, 0], "ke3": [16, 10, 3, 0, 0, 0, 0, 0], "tai2": [25, 6, 14, 2, 0, 0, 0, 0], "tai1": [25, 6, 14, 1, 0, 0, 0, 0], "chi4": [8, 13, 14, 4, 0, 0, 0, 0], "hao4": [13, 6, 20, 4, 0, 0, 0, 0], "hao2": [13, 6, 20, 2, 0, 0, 0, 0], "tan4": [25, 6, 19, 4, 0, 0, 0, 0], "chi1": [8, 13, 14, 1, 0, 0, 0, 0], "cun4": [8, 26, 19, 4, 0, 0, 0, 0], "dou4": [9, 20, 26, 4, 0, 0, 0, 0], "ying1": [30, 14, 19, 12, 1, 0, 0, 0], "tong4": [25, 20, 19, 12, 4, 0, 0, 0], "ming4": [18, 14, 19, 12, 4, 0, 0, 0], "tu3": [25, 26, 3, 0, 0, 0, 0, 0], "zha1": [31, 13, 6, 1, 0, 0, 0, 0], "ha4": [13, 6, 4, 0, 0, 0, 0, 0], "a1": [6, 1, 0, 0, 0, 0, 0, 0], "ma2": [18, 6, 2, 0, 0, 0, 0, 0], "ma3": [18, 6, 3, 0, 0, 0, 0, 0], "tun1": [25, 26, 19, 1, 0, 0, 0, 0], "yin3": [30, 14, 19, 3, 0, 0, 0, 0], "fei4": [11, 10, 14, 4, 0, 0, 0, 0], "tun2": [25, 26, 19, 2, 0, 0, 0, 0], "tun3": [25, 26, 19, 3, 0, 0, 0, 0], "pen4": [21, 10, 19, 4, 0, 0, 0, 0], "han4": [13, 6, 19, 4, 0, 0, 0, 0], "keng1": [16, 10, 19, 12, 1, 0, 0, 0], "hang2": [13, 6, 19, 12, 2, 0, 0, 0], "hang4": [13, 6, 19, 12, 4, 0, 0, 0], "shun3": [24, 13, 26, 19, 3, 0, 0, 0], "chao3": [8, 13, 6, 20, 3, 0, 0, 0], "miao3": [18, 14, 6, 20, 3, 0, 0, 0], "chao4": [8, 13, 6, 20, 4, 0, 0, 0], "chui1": [8, 13, 26, 14, 1, 0, 0, 
0], "chui4": [8, 13, 26, 14, 4, 0, 0, 0], "hou3": [13, 20, 26, 3, 0, 0, 0, 0], "hong1": [13, 20, 19, 12, 1, 0, 0, 0], "ya": [30, 6, 5, 0, 0, 0, 0, 0], "xia1": [29, 14, 6, 1, 0, 0, 0, 0], "e": [10, 5, 0, 0, 0, 0, 0, 0], "dai1": [9, 6, 14, 1, 0, 0, 0, 0], "ai2": [6, 14, 2, 0, 0, 0, 0, 0], "kuang2": [16, 26, 6, 19, 12, 2, 0, 0], "cheng3": [8, 13, 10, 19, 12, 3, 0, 0], "gao4": [12, 6, 20, 4, 0, 0, 0, 0], "ne4": [19, 10, 4, 0, 0, 0, 0, 0], "na": [19, 6, 5, 0, 0, 0, 0, 0], "nuo4": [19, 26, 20, 4, 0, 0, 0, 0], "ne": [19, 10, 5, 0, 0, 0, 0, 0], "ou4": [20, 26, 4, 0, 0, 0, 0, 0], "bei": [7, 10, 14, 5, 0, 0, 0, 0], "bai4": [7, 6, 14, 4, 0, 0, 0, 0], "qiang4": [22, 14, 6, 19, 12, 4, 0, 0], "ni1": [19, 14, 1, 0, 0, 0, 0, 0], "mei4": [18, 10, 14, 4, 0, 0, 0, 0], "he1": [13, 10, 1, 0, 0, 0, 0, 0], "ha1": [13, 6, 1, 0, 0, 0, 0, 0], "a": [6, 5, 0, 0, 0, 0, 0, 0], "ke1": [16, 10, 1, 0, 0, 0, 0, 0], "huo1": [13, 26, 20, 1, 0, 0, 0, 0], "a2": [6, 2, 0, 0, 0, 0, 0, 0], "a4": [6, 4, 0, 0, 0, 0, 0, 0], "pei1": [21, 10, 14, 1, 0, 0, 0, 0], "zui3": [31, 26, 14, 3, 0, 0, 0, 0], "xian2": [29, 14, 6, 19, 2, 0, 0, 0], "duo1": [9, 26, 20, 1, 0, 0, 0, 0], "za3": [31, 6, 3, 0, 0, 0, 0, 0], "huo2": [13, 26, 20, 2, 0, 0, 0, 0], "hu2": [13, 26, 2, 0, 0, 0, 0, 0], "gao1": [12, 6, 20, 1, 0, 0, 0, 0], "zhou4": [31, 13, 20, 26, 4, 0, 0, 0], "ka1": [16, 6, 1, 0, 0, 0, 0, 0], "nong4": [19, 20, 19, 12, 4, 0, 0, 0], "gu": [12, 26, 5, 0, 0, 0, 0, 0], "zuo": [31, 26, 20, 5, 0, 0, 0, 0], "long2": [17, 20, 19, 12, 2, 0, 0, 0], "lie3": [17, 14, 10, 3, 0, 0, 0, 0], "lie1": [17, 14, 10, 1, 0, 0, 0, 0], "lie2": [17, 14, 10, 2, 0, 0, 0, 0], "lie": [17, 14, 10, 5, 0, 0, 0, 0], "mie": [18, 14, 10, 5, 0, 0, 0, 0], "mi1": [18, 14, 1, 0, 0, 0, 0, 0], "mi3": [18, 14, 3, 0, 0, 0, 0, 0], "yao3": [30, 6, 20, 3, 0, 0, 0, 0], "luo4": [17, 26, 20, 4, 0, 0, 0, 0], "lo": [17, 20, 5, 0, 0, 0, 0, 0], "zan2": [31, 6, 19, 2, 0, 0, 0, 0], "za2": [31, 6, 2, 0, 0, 0, 0, 0], "zan": [31, 6, 19, 5, 0, 0, 0, 0], "hai1": [13, 6, 14, 1, 0, 
0, 0, 0], "hai2": [13, 6, 14, 2, 0, 0, 0, 0], "ke2": [16, 10, 2, 0, 0, 0, 0, 0], "gai1": [12, 6, 14, 1, 0, 0, 0, 0], "xu3": [29, 26, 3, 0, 0, 0, 0, 0], "ai1": [6, 14, 1, 0, 0, 0, 0, 0], "pin3": [21, 14, 19, 3, 0, 0, 0, 0], "shen3": [24, 13, 10, 19, 3, 0, 0, 0], "hong4": [13, 20, 19, 12, 4, 0, 0, 0], "hong3": [13, 20, 19, 12, 3, 0, 0, 0], "die3": [9, 14, 10, 3, 0, 0, 0, 0], "wa": [28, 6, 5, 0, 0, 0, 0, 0], "wa2": [28, 6, 2, 0, 0, 0, 0, 0], "ha3": [13, 6, 3, 0, 0, 0, 0, 0], "ta4": [25, 6, 4, 0, 0, 0, 0, 0], "zai1": [31, 6, 14, 1, 0, 0, 0, 0], "die4": [9, 14, 10, 4, 0, 0, 0, 0], "pai4": [21, 6, 14, 4, 0, 0, 0, 0], "gen2": [12, 10, 19, 2, 0, 0, 0, 0], "hen3": [13, 10, 19, 3, 0, 0, 0, 0], "n4": [19, 4, 0, 0, 0, 0, 0, 0], "ya3": [30, 6, 3, 0, 0, 0, 0, 0], "da2": [9, 6, 2, 0, 0, 0, 0, 0], "yo1": [30, 20, 1, 0, 0, 0, 0, 0], "yo": [30, 20, 5, 0, 0, 0, 0, 0], "o2": [20, 2, 0, 0, 0, 0, 0, 0], "o4": [20, 4, 0, 0, 0, 0, 0, 0], "sao1": [24, 6, 20, 1, 0, 0, 0, 0], "sao4": [24, 6, 20, 4, 0, 0, 0, 0], "li1": [17, 14, 1, 0, 0, 0, 0, 0], "li": [17, 14, 5, 0, 0, 0, 0, 0], "mai2": [18, 6, 14, 2, 0, 0, 0, 0], "na3": [19, 6, 3, 0, 0, 0, 0, 0], "ne2": [19, 10, 2, 0, 0, 0, 0, 0], "nei3": [19, 10, 14, 3, 0, 0, 0, 0], "zhe2": [31, 13, 10, 2, 0, 0, 0, 0], "bu1": [7, 26, 1, 0, 0, 0, 0, 0], "hng": [13, 19, 12, 5, 0, 0, 0, 0], "geng3": [12, 10, 19, 12, 3, 0, 0, 0], "ying3": [30, 14, 19, 12, 3, 0, 0, 0], "ying4": [30, 14, 19, 12, 4, 0, 0, 0], "ng2": [19, 12, 2, 0, 0, 0, 0, 0], "n2": [19, 2, 0, 0, 0, 0, 0, 0], "chun2": [8, 13, 26, 19, 2, 0, 0, 0], "zhen4": [31, 13, 10, 19, 4, 0, 0, 0], "tang2": [25, 6, 19, 12, 2, 0, 0, 0], "m2": [18, 2, 0, 0, 0, 0, 0, 0], "huan4": [13, 26, 6, 19, 4, 0, 0, 0], "hu3": [13, 26, 3, 0, 0, 0, 0, 0], "guo2": [12, 26, 20, 2, 0, 0, 0, 0], "shu2": [24, 13, 26, 2, 0, 0, 0, 0], "nian4": [19, 14, 6, 19, 4, 0, 0, 0], "ken3": [16, 10, 19, 3, 0, 0, 0, 0], "len4": [17, 10, 19, 4, 0, 0, 0, 0], "a3": [6, 3, 0, 0, 0, 0, 0, 0], "tao2": [25, 6, 20, 2, 0, 0, 0, 0], "chuai4": [8, 13, 26, 
6, 14, 4, 0, 0], "sha2": [24, 13, 6, 2, 0, 0, 0, 0], "la": [17, 6, 5, 0, 0, 0, 0, 0], "la1": [17, 6, 1, 0, 0, 0, 0, 0], "zhe3": [31, 13, 10, 3, 0, 0, 0, 0], "se4": [24, 10, 4, 0, 0, 0, 0, 0], "luo1": [17, 26, 20, 1, 0, 0, 0, 0], "ding4": [9, 14, 19, 12, 4, 0, 0, 0], "lang1": [17, 6, 19, 12, 1, 0, 0, 0], "ti2": [25, 14, 2, 0, 0, 0, 0, 0], "ke": [16, 10, 5, 0, 0, 0, 0, 0], "nan3": [19, 6, 19, 3, 0, 0, 0, 0], "la3": [17, 6, 3, 0, 0, 0, 0, 0], "kan4": [16, 6, 19, 4, 0, 0, 0, 0], "zha2": [31, 13, 6, 2, 0, 0, 0, 0], "wai1": [28, 6, 14, 1, 0, 0, 0, 0], "re3": [23, 10, 3, 0, 0, 0, 0, 0], "o1": [20, 1, 0, 0, 0, 0, 0, 0], "o": [20, 5, 0, 0, 0, 0, 0, 0], "chuan3": [8, 13, 26, 6, 19, 3, 0, 0], "xi3": [29, 14, 3, 0, 0, 0, 0, 0], "huai4": [13, 26, 6, 14, 4, 0, 0, 0], "xuan3": [29, 26, 6, 19, 3, 0, 0, 0], "tan2": [25, 6, 19, 2, 0, 0, 0, 0], "sun1": [24, 26, 19, 1, 0, 0, 0, 0], "zha": [31, 13, 6, 5, 0, 0, 0, 0], "miao1": [18, 14, 6, 20, 1, 0, 0, 0], "ying2": [30, 14, 19, 12, 2, 0, 0, 0], "pen1": [21, 10, 19, 1, 0, 0, 0, 0], "kui2": [16, 26, 14, 2, 0, 0, 0, 0], "lou": [17, 20, 26, 5, 0, 0, 0, 0], "xiu4": [29, 14, 26, 4, 0, 0, 0, 0], "ma4": [18, 6, 4, 0, 0, 0, 0, 0], "da1": [9, 6, 1, 0, 0, 0, 0, 0], "da": [9, 6, 5, 0, 0, 0, 0, 0], "sang3": [24, 6, 19, 12, 3, 0, 0, 0], "chen1": [8, 13, 10, 19, 1, 0, 0, 0], "su4": [24, 26, 4, 0, 0, 0, 0, 0], "sou4": [24, 20, 26, 4, 0, 0, 0, 0], "jue1": [15, 26, 10, 1, 0, 0, 0, 0], "weng1": [28, 10, 19, 12, 1, 0, 0, 0], "weng3": [28, 10, 19, 12, 3, 0, 0, 0], "suo": [24, 26, 20, 5, 0, 0, 0, 0], "hei1": [13, 10, 14, 1, 0, 0, 0, 0], "ng3": [19, 12, 3, 0, 0, 0, 0, 0], "n3": [19, 3, 0, 0, 0, 0, 0, 0], "ng4": [19, 12, 4, 0, 0, 0, 0, 0], "die1": [9, 14, 10, 1, 0, 0, 0, 0], "dia3": [9, 14, 6, 3, 0, 0, 0, 0], "di2": [9, 14, 2, 0, 0, 0, 0, 0], "cao2": [8, 6, 20, 2, 0, 0, 0, 0], "lou3": [17, 20, 26, 3, 0, 0, 0, 0], "ga2": [12, 6, 2, 0, 0, 0, 0, 0], "ga3": [12, 6, 3, 0, 0, 0, 0, 0], "ou": [20, 26, 5, 0, 0, 0, 0, 0], "shi1": [24, 13, 14, 1, 0, 0, 0, 0], "de1": [9, 
10, 1, 0, 0, 0, 0, 0], "de2": [9, 10, 2, 0, 0, 0, 0, 0], "dei1": [9, 10, 14, 1, 0, 0, 0, 0], "le1": [17, 10, 1, 0, 0, 0, 0, 0], "du1": [9, 26, 1, 0, 0, 0, 0, 0], "chao2": [8, 13, 6, 20, 2, 0, 0, 0], "zhao1": [31, 13, 6, 20, 1, 0, 0, 0], "m1": [18, 1, 0, 0, 0, 0, 0, 0], "ceng1": [8, 10, 19, 12, 1, 0, 0, 0], "ye1": [30, 10, 1, 0, 0, 0, 0, 0], "deng1": [9, 10, 19, 12, 1, 0, 0, 0], "lu1": [17, 26, 1, 0, 0, 0, 0, 0], "zao4": [31, 6, 20, 4, 0, 0, 0, 0], "xue2": [29, 26, 10, 2, 0, 0, 0, 0], "dang1": [9, 6, 19, 12, 1, 0, 0, 0], "sai1": [24, 6, 14, 1, 0, 0, 0, 0], "ca1": [8, 6, 1, 0, 0, 0, 0, 0], "rang3": [23, 6, 19, 12, 3, 0, 0, 0], "rang1": [23, 6, 19, 12, 1, 0, 0, 0], "jiao2": [15, 14, 6, 20, 2, 0, 0, 0], "luo2": [17, 26, 20, 2, 0, 0, 0, 0], "luo": [17, 26, 20, 5, 0, 0, 0, 0], "nang2": [19, 6, 19, 12, 2, 0, 0, 0], "nang1": [19, 6, 19, 12, 1, 0, 0, 0], "nang": [19, 6, 19, 12, 5, 0, 0, 0], "nan1": [19, 6, 19, 1, 0, 0, 0, 0], "hui2": [13, 26, 14, 2, 0, 0, 0, 0], "yin1": [30, 14, 19, 1, 0, 0, 0, 0], "tuan2": [25, 26, 6, 19, 2, 0, 0, 0], "dun4": [9, 26, 19, 4, 0, 0, 0, 0], "kun4": [16, 26, 19, 4, 0, 0, 0, 0], "pu3": [21, 26, 3, 0, 0, 0, 0, 0], "juan1": [15, 26, 6, 19, 1, 0, 0, 0], "huan2": [13, 26, 6, 19, 2, 0, 0, 0], "du4": [9, 26, 4, 0, 0, 0, 0, 0], "de": [9, 10, 5, 0, 0, 0, 0, 0], "quan3": [22, 26, 6, 19, 3, 0, 0, 0], "huai2": [13, 26, 6, 14, 2, 0, 0, 0], "ban3": [7, 6, 19, 3, 0, 0, 0, 0], "fang1": [11, 6, 19, 12, 1, 0, 0, 0], "fang2": [11, 6, 19, 12, 2, 0, 0, 0], "tan1": [25, 6, 19, 1, 0, 0, 0, 0], "yue2": [30, 26, 10, 2, 0, 0, 0, 0], "zhui4": [31, 13, 26, 14, 4, 0, 0, 0], "po1": [21, 20, 1, 0, 0, 0, 0, 0], "chui2": [8, 13, 26, 14, 2, 0, 0, 0], "long3": [17, 20, 19, 12, 3, 0, 0, 0], "duo3": [9, 26, 20, 3, 0, 0, 0, 0], "ken4": [16, 10, 19, 4, 0, 0, 0, 0], "kua3": [16, 26, 6, 3, 0, 0, 0, 0], "man2": [18, 6, 19, 2, 0, 0, 0, 0], "pou3": [21, 20, 26, 3, 0, 0, 0, 0], "dui1": [9, 26, 14, 1, 0, 0, 0, 0], "zui1": [31, 26, 14, 1, 0, 0, 0, 0], "hui1": [13, 26, 14, 1, 0, 0, 0, 0], 
"pu4": [21, 26, 4, 0, 0, 0, 0, 0], "di3": [9, 14, 3, 0, 0, 0, 0, 0], "chen3": [8, 13, 10, 19, 3, 0, 0, 0], "dang4": [9, 6, 19, 12, 4, 0, 0, 0], "du3": [9, 26, 3, 0, 0, 0, 0, 0], "ta3": [25, 6, 3, 0, 0, 0, 0, 0], "sai4": [24, 6, 14, 4, 0, 0, 0, 0], "chen2": [8, 13, 10, 19, 2, 0, 0, 0], "wen1": [28, 10, 19, 1, 0, 0, 0, 0], "zeng1": [31, 10, 19, 12, 1, 0, 0, 0], "qiang2": [22, 14, 6, 19, 12, 2, 0, 0], "zeng4": [31, 10, 19, 12, 4, 0, 0, 0], "weng4": [28, 10, 19, 12, 4, 0, 0, 0], "lv4": [17, 27, 4, 0, 0, 0, 0, 0], "zhuang1": [31, 13, 26, 6, 19, 12, 1, 0], "wai4": [28, 6, 14, 4, 0, 0, 0, 0], "meng4": [18, 10, 19, 12, 4, 0, 0, 0], "meng2": [18, 10, 19, 12, 2, 0, 0, 0], "da4": [9, 6, 4, 0, 0, 0, 0, 0], "tai4": [25, 6, 14, 4, 0, 0, 0, 0], "yang1": [30, 6, 19, 12, 1, 0, 0, 0], "hang1": [13, 6, 19, 12, 1, 0, 0, 0], "tou2": [25, 20, 26, 2, 0, 0, 0, 0], "tou": [25, 20, 26, 5, 0, 0, 0, 0], "kua1": [16, 26, 6, 1, 0, 0, 0, 0], "kua4": [16, 26, 6, 4, 0, 0, 0, 0], "jia2": [15, 14, 6, 2, 0, 0, 0, 0], "nai4": [19, 6, 14, 4, 0, 0, 0, 0], "zou4": [31, 20, 26, 4, 0, 0, 0, 0], "ben1": [7, 10, 19, 1, 0, 0, 0, 0], "jiang3": [15, 14, 6, 19, 12, 3, 0, 0], "tao4": [25, 6, 20, 4, 0, 0, 0, 0], "tao3": [25, 6, 20, 3, 0, 0, 0, 0], "zang4": [31, 6, 19, 12, 4, 0, 0, 0], "zhuang3": [31, 13, 26, 6, 19, 12, 3, 0], "zheng4": [31, 13, 10, 19, 12, 4, 0, 0], "zun1": [31, 26, 19, 1, 0, 0, 0, 0], "she1": [24, 13, 10, 1, 0, 0, 0, 0], "nv3": [19, 27, 3, 0, 0, 0, 0, 0], "nv4": [19, 27, 4, 0, 0, 0, 0, 0], "jie3": [15, 14, 10, 3, 0, 0, 0, 0], "hao3": [13, 6, 20, 3, 0, 0, 0, 0], "wang4": [28, 6, 19, 12, 4, 0, 0, 0], "ma1": [18, 6, 1, 0, 0, 0, 0, 0], "miao4": [18, 14, 6, 20, 4, 0, 0, 0], "niu1": [19, 14, 26, 1, 0, 0, 0, 0], "tuo3": [25, 26, 20, 3, 0, 0, 0, 0], "pan1": [21, 6, 19, 1, 0, 0, 0, 0], "zu1": [31, 26, 1, 0, 0, 0, 0, 0], "yao4": [30, 6, 20, 4, 0, 0, 0, 0], "zhen3": [31, 13, 10, 19, 3, 0, 0, 0], "rao2": [23, 6, 20, 2, 0, 0, 0, 0], "rao3": [23, 6, 20, 3, 0, 0, 0, 0], "suo3": [24, 26, 20, 3, 0, 0, 0, 0], 
"niang2": [19, 14, 6, 19, 12, 2, 0, 0], "e1": [10, 1, 0, 0, 0, 0, 0, 0], "lv2": [17, 27, 2, 0, 0, 0, 0, 0], "po2": [21, 20, 2, 0, 0, 0, 0, 0], "hun1": [13, 26, 19, 1, 0, 0, 0, 0], "lan3": [17, 6, 19, 3, 0, 0, 0, 0], "mei2": [18, 10, 14, 2, 0, 0, 0, 0], "yuan4": [30, 26, 6, 19, 4, 0, 0, 0], "bi1": [7, 14, 1, 0, 0, 0, 0, 0], "sao3": [24, 6, 20, 3, 0, 0, 0, 0], "bao2": [7, 6, 20, 2, 0, 0, 0, 0], "pin2": [21, 14, 19, 2, 0, 0, 0, 0], "man1": [18, 6, 19, 1, 0, 0, 0, 0], "man4": [18, 6, 19, 4, 0, 0, 0, 0], "nen4": [19, 10, 19, 4, 0, 0, 0, 0], "qiong2": [22, 14, 20, 19, 12, 2, 0, 0], "xuan2": [29, 26, 6, 19, 2, 0, 0, 0], "rang2": [23, 6, 19, 12, 2, 0, 0, 0], "zi": [31, 14, 5, 0, 0, 0, 0, 0], "kong3": [16, 20, 19, 12, 3, 0, 0, 0], "cun2": [8, 26, 19, 2, 0, 0, 0, 0], "luan2": [17, 26, 6, 19, 2, 0, 0, 0], "xun4": [29, 26, 19, 4, 0, 0, 0, 0], "nao1": [19, 6, 20, 1, 0, 0, 0, 0], "zhai2": [31, 13, 6, 14, 2, 0, 0, 0], "che4": [8, 13, 10, 4, 0, 0, 0, 0], "shou3": [24, 13, 20, 26, 3, 0, 0, 0], "song4": [24, 20, 19, 12, 4, 0, 0, 0], "kuan1": [16, 26, 6, 19, 1, 0, 0, 0], "zong1": [31, 20, 19, 12, 1, 0, 0, 0], "chong3": [8, 13, 20, 19, 12, 3, 0, 0], "xiong4": [29, 14, 20, 19, 12, 4, 0, 0], "jia": [15, 14, 6, 5, 0, 0, 0, 0], "rong2": [23, 20, 19, 12, 2, 0, 0, 0], "xiu3": [29, 14, 26, 3, 0, 0, 0, 0], "cun3": [8, 26, 19, 3, 0, 0, 0, 0], "xun2": [29, 26, 19, 2, 0, 0, 0, 0], "xin2": [29, 14, 19, 2, 0, 0, 0, 0], "she4": [24, 13, 10, 4, 0, 0, 0, 0], "shuan4": [24, 13, 26, 6, 19, 4, 0, 0], "xiao3": [29, 14, 6, 20, 3, 0, 0, 0], "shao3": [24, 13, 6, 20, 3, 0, 0, 0], "er3": [10, 23, 3, 0, 0, 0, 0, 0], "ga4": [12, 6, 4, 0, 0, 0, 0, 0], "che3": [8, 13, 10, 3, 0, 0, 0, 0], "kao1": [16, 6, 20, 1, 0, 0, 0, 0], "niao4": [19, 14, 6, 20, 4, 0, 0, 0], "sui1": [24, 26, 14, 1, 0, 0, 0, 0], "zhan3": [31, 13, 6, 19, 3, 0, 0, 0], "shu3": [24, 13, 26, 3, 0, 0, 0, 0], "zhun1": [31, 13, 26, 19, 1, 0, 0, 0], "sui4": [24, 26, 14, 4, 0, 0, 0, 0], "cen2": [8, 10, 19, 2, 0, 0, 0, 0], "gang3": [12, 6, 19, 12, 3, 0, 0, 
0], "min2": [18, 14, 19, 2, 0, 0, 0, 0], "an4": [6, 19, 4, 0, 0, 0, 0, 0], "kong1": [16, 20, 19, 12, 1, 0, 0, 0], "chong2": [8, 13, 20, 19, 12, 2, 0, 0], "wai3": [28, 6, 14, 3, 0, 0, 0, 0], "cuo2": [8, 26, 20, 2, 0, 0, 0, 0], "chuan1": [8, 13, 26, 6, 19, 1, 0, 0], "qiao3": [22, 14, 6, 20, 3, 0, 0, 0], "chai1": [8, 13, 6, 14, 1, 0, 0, 0], "chai4": [8, 13, 6, 14, 4, 0, 0, 0], "cuo1": [8, 26, 20, 1, 0, 0, 0, 0], "shuai4": [24, 13, 26, 6, 14, 4, 0, 0], "fan1": [11, 6, 19, 1, 0, 0, 0, 0], "pa4": [21, 6, 4, 0, 0, 0, 0, 0], "tie4": [25, 14, 10, 4, 0, 0, 0, 0], "tie3": [25, 14, 10, 3, 0, 0, 0, 0], "zhou3": [31, 13, 20, 26, 3, 0, 0, 0], "bang1": [7, 6, 19, 12, 1, 0, 0, 0], "huang3": [13, 26, 6, 19, 12, 3, 0, 0], "chuang2": [8, 13, 26, 6, 19, 12, 2, 0], "nian2": [19, 14, 6, 19, 2, 0, 0, 0], "guan3": [12, 26, 6, 19, 3, 0, 0, 0], "guang3": [12, 26, 6, 19, 12, 3, 0, 0], "ku4": [16, 26, 4, 0, 0, 0, 0, 0], "xiang2": [29, 14, 6, 19, 12, 2, 0, 0], "zhe1": [31, 13, 10, 1, 0, 0, 0, 0], "kang1": [16, 6, 19, 12, 1, 0, 0, 0], "yong2": [30, 20, 19, 12, 2, 0, 0, 0], "lang2": [17, 6, 19, 12, 2, 0, 0, 0], "kuo4": [16, 26, 20, 4, 0, 0, 0, 0], "kai1": [16, 6, 14, 1, 0, 0, 0, 0], "long4": [17, 20, 19, 12, 4, 0, 0, 0], "tui2": [25, 26, 14, 2, 0, 0, 0, 0], "zhang1": [31, 13, 6, 19, 12, 1, 0, 0], "mi2": [18, 14, 2, 0, 0, 0, 0, 0], "qiang3": [22, 14, 6, 19, 12, 3, 0, 0], "cai3": [8, 6, 14, 3, 0, 0, 0, 0], "wang3": [28, 6, 19, 12, 3, 0, 0, 0], "dei3": [9, 10, 14, 3, 0, 0, 0, 0], "zong4": [31, 20, 19, 12, 4, 0, 0, 0], "zong3": [31, 20, 19, 12, 3, 0, 0, 0], "xin1": [29, 14, 19, 1, 0, 0, 0, 0], "ren3": [23, 10, 19, 3, 0, 0, 0, 0], "chan4": [8, 13, 6, 19, 4, 0, 0, 0], "qian3": [22, 14, 6, 19, 3, 0, 0, 0], "keng3": [16, 10, 19, 12, 3, 0, 0, 0], "tui1": [25, 26, 14, 1, 0, 0, 0, 0], "tei1": [25, 10, 14, 1, 0, 0, 0, 0], "mang2": [18, 6, 19, 12, 2, 0, 0, 0], "song3": [24, 20, 19, 12, 3, 0, 0, 0], "zen3": [31, 10, 19, 3, 0, 0, 0, 0], "yang4": [30, 6, 19, 12, 4, 0, 0, 0], "nu4": [19, 26, 4, 0, 0, 0, 0, 0], 
"guai4": [12, 26, 6, 14, 4, 0, 0, 0], "nin2": [19, 14, 19, 2, 0, 0, 0, 0], "heng2": [13, 10, 19, 12, 2, 0, 0, 0], "lian4": [17, 14, 6, 19, 4, 0, 0, 0], "hen4": [13, 10, 19, 4, 0, 0, 0, 0], "en1": [10, 19, 1, 0, 0, 0, 0, 0], "nao3": [19, 6, 20, 3, 0, 0, 0, 0], "qiao1": [22, 14, 6, 20, 1, 0, 0, 0], "hui3": [13, 26, 14, 3, 0, 0, 0, 0], "min3": [18, 14, 19, 3, 0, 0, 0, 0], "men1": [18, 10, 19, 1, 0, 0, 0, 0], "qing2": [22, 14, 19, 12, 2, 0, 0, 0], "can3": [8, 6, 19, 3, 0, 0, 0, 0], "can2": [8, 6, 19, 2, 0, 0, 0, 0], "gan3": [12, 6, 19, 3, 0, 0, 0, 0], "leng4": [17, 10, 19, 12, 4, 0, 0, 0], "huang1": [13, 26, 6, 19, 12, 1, 0, 0], "huang": [13, 26, 6, 19, 12, 5, 0, 0], "shen4": [24, 13, 10, 19, 4, 0, 0, 0], "bie1": [7, 14, 10, 1, 0, 0, 0, 0], "han1": [13, 6, 19, 1, 0, 0, 0, 0], "dong3": [9, 20, 19, 12, 3, 0, 0, 0], "qu": [22, 26, 5, 0, 0, 0, 0, 0], "reng1": [23, 10, 19, 12, 1, 0, 0, 0], "zang1": [31, 6, 19, 12, 1, 0, 0, 0], "chuo1": [8, 13, 26, 20, 1, 0, 0, 0], "cai2": [8, 6, 14, 2, 0, 0, 0, 0], "zha3": [31, 13, 6, 3, 0, 0, 0, 0], "pa2": [21, 6, 2, 0, 0, 0, 0, 0], "da3": [9, 6, 3, 0, 0, 0, 0, 0], "reng4": [23, 10, 19, 12, 4, 0, 0, 0], "kang2": [16, 6, 19, 12, 2, 0, 0, 0], "niu3": [19, 14, 26, 3, 0, 0, 0, 0], "fen3": [11, 10, 19, 3, 0, 0, 0, 0], "zhao3": [31, 13, 6, 20, 3, 0, 0, 0], "ba3": [7, 6, 3, 0, 0, 0, 0, 0], "zhua1": [31, 13, 26, 6, 1, 0, 0, 0], "dou3": [9, 20, 26, 3, 0, 0, 0, 0], "pao1": [21, 6, 20, 1, 0, 0, 0, 0], "kou1": [16, 20, 26, 1, 0, 0, 0, 0], "lun1": [17, 26, 19, 1, 0, 0, 0, 0], "mo3": [18, 20, 3, 0, 0, 0, 0, 0], "chou1": [8, 13, 20, 26, 1, 0, 0, 0], "nian1": [19, 14, 6, 19, 1, 0, 0, 0], "nian3": [19, 14, 6, 19, 3, 0, 0, 0], "pai1": [21, 6, 14, 1, 0, 0, 0, 0], "lin1": [17, 14, 19, 1, 0, 0, 0, 0], "ling1": [17, 14, 19, 12, 1, 0, 0, 0], "guai3": [12, 26, 6, 14, 3, 0, 0, 0], "ao3": [6, 20, 3, 0, 0, 0, 0, 0], "niu4": [19, 14, 26, 4, 0, 0, 0, 0], "pin1": [21, 14, 19, 1, 0, 0, 0, 0], "ning3": [19, 14, 19, 12, 3, 0, 0, 0], "shuan1": [24, 13, 26, 6, 19, 1, 0, 0], 
"kao3": [16, 6, 20, 3, 0, 0, 0, 0], "zhuai1": [31, 13, 26, 6, 14, 1, 0, 0], "zhuai4": [31, 13, 26, 6, 14, 4, 0, 0], "na2": [19, 6, 2, 0, 0, 0, 0, 0], "tiao3": [25, 14, 6, 20, 3, 0, 0, 0], "tiao": [25, 14, 6, 20, 5, 0, 0, 0], "nao2": [19, 6, 20, 2, 0, 0, 0, 0], "ting3": [25, 14, 19, 12, 3, 0, 0, 0], "kun3": [16, 26, 19, 3, 0, 0, 0, 0], "hun2": [13, 26, 19, 2, 0, 0, 0, 0], "shao1": [24, 13, 6, 20, 1, 0, 0, 0], "nie1": [19, 14, 10, 1, 0, 0, 0, 0], "lao1": [17, 6, 20, 1, 0, 0, 0, 0], "sun3": [24, 26, 19, 3, 0, 0, 0, 0], "peng3": [21, 10, 19, 12, 3, 0, 0, 0], "she3": [24, 13, 10, 3, 0, 0, 0, 0], "hen2": [13, 10, 19, 2, 0, 0, 0, 0], "zhang3": [31, 13, 6, 19, 12, 3, 0, 0], "qia1": [22, 14, 6, 1, 0, 0, 0, 0], "pai3": [21, 6, 14, 3, 0, 0, 0, 0], "lve4": [17, 27, 10, 4, 0, 0, 0, 0], "lve3": [17, 27, 10, 3, 0, 0, 0, 0], "kong4": [16, 20, 19, 12, 4, 0, 0, 0], "bai1": [7, 6, 14, 1, 0, 0, 0, 0], "shan3": [24, 13, 6, 19, 3, 0, 0, 0], "rou2": [23, 20, 26, 2, 0, 0, 0, 0], "miao2": [18, 14, 6, 20, 2, 0, 0, 0], "chuai1": [8, 13, 26, 6, 14, 1, 0, 0], "chuai3": [8, 13, 26, 6, 14, 3, 0, 0], "zhui1": [31, 13, 26, 14, 1, 0, 0, 0], "ye2": [30, 10, 2, 0, 0, 0, 0, 0], "lou1": [17, 20, 26, 1, 0, 0, 0, 0], "cuo3": [8, 26, 20, 3, 0, 0, 0, 0], "gao3": [12, 6, 20, 3, 0, 0, 0, 0], "kao4": [16, 6, 20, 4, 0, 0, 0, 0], "en4": [10, 19, 4, 0, 0, 0, 0, 0], "bin4": [7, 14, 19, 4, 0, 0, 0, 0], "shuai1": [24, 13, 26, 6, 14, 1, 0, 0], "zui4": [31, 26, 14, 4, 0, 0, 0, 0], "mo1": [18, 20, 1, 0, 0, 0, 0, 0], "sen1": [24, 10, 19, 1, 0, 0, 0, 0], "pie1": [21, 14, 10, 1, 0, 0, 0, 0], "sa3": [24, 6, 3, 0, 0, 0, 0, 0], "rao4": [23, 6, 20, 4, 0, 0, 0, 0], "liao1": [17, 14, 6, 20, 1, 0, 0, 0], "bo3": [7, 20, 3, 0, 0, 0, 0, 0], "zuan1": [31, 26, 6, 19, 1, 0, 0, 0], "chua1": [8, 13, 26, 6, 1, 0, 0, 0], "suan4": [24, 26, 6, 19, 4, 0, 0, 0], "cao1": [8, 6, 20, 1, 0, 0, 0, 0], "bo4": [7, 20, 4, 0, 0, 0, 0, 0], "zan3": [31, 6, 19, 3, 0, 0, 0, 0], "cuan2": [8, 26, 6, 19, 2, 0, 0, 0], "rang4": [23, 6, 19, 12, 4, 0, 0, 0], 
"luo3": [17, 26, 20, 3, 0, 0, 0, 0], "zan4": [31, 6, 19, 4, 0, 0, 0, 0], "nan4": [19, 6, 19, 4, 0, 0, 0, 0], "zuan4": [31, 26, 6, 19, 4, 0, 0, 0], "gai3": [12, 6, 14, 3, 0, 0, 0, 0], "fang4": [11, 6, 19, 12, 4, 0, 0, 0], "lian3": [17, 14, 6, 19, 3, 0, 0, 0], "cheng4": [8, 13, 10, 19, 12, 4, 0, 0], "san4": [24, 6, 19, 4, 0, 0, 0, 0], "wen2": [28, 10, 19, 2, 0, 0, 0, 0], "zhe4": [31, 13, 10, 4, 0, 0, 0, 0], "duan4": [9, 26, 6, 19, 4, 0, 0, 0], "ri4": [23, 14, 4, 0, 0, 0, 0, 0], "zao3": [31, 6, 20, 3, 0, 0, 0, 0], "hun4": [13, 26, 19, 4, 0, 0, 0, 0], "chun1": [8, 13, 26, 19, 1, 0, 0, 0], "chun3": [8, 13, 26, 19, 3, 0, 0, 0], "huang4": [13, 26, 6, 19, 12, 4, 0, 0], "shai4": [24, 13, 6, 14, 4, 0, 0, 0], "yun1": [30, 26, 19, 1, 0, 0, 0, 0], "gui3": [12, 26, 14, 3, 0, 0, 0, 0], "nuan3": [19, 26, 6, 19, 3, 0, 0, 0], "yue1": [30, 26, 10, 1, 0, 0, 0, 0], "lang3": [17, 6, 19, 12, 3, 0, 0, 0], "mang3": [18, 6, 19, 12, 3, 0, 0, 0], "ben3": [7, 10, 19, 3, 0, 0, 0, 0], "zhu2": [31, 13, 26, 2, 0, 0, 0, 0], "po4": [21, 20, 4, 0, 0, 0, 0, 0], "cun1": [8, 26, 19, 1, 0, 0, 0, 0], "shuo2": [24, 13, 26, 20, 2, 0, 0, 0], "gang4": [12, 6, 19, 12, 4, 0, 0, 0], "gou3": [12, 20, 26, 3, 0, 0, 0, 0], "mou3": [18, 20, 26, 3, 0, 0, 0, 0], "zhou2": [31, 13, 20, 26, 2, 0, 0, 0], "liu3": [17, 14, 26, 3, 0, 0, 0, 0], "gen1": [12, 10, 19, 1, 0, 0, 0, 0], "gun4": [12, 26, 19, 4, 0, 0, 0, 0], "leng2": [17, 10, 19, 12, 2, 0, 0, 0], "leng1": [17, 10, 19, 12, 1, 0, 0, 0], "kuan3": [16, 26, 6, 19, 3, 0, 0, 0], "peng4": [21, 10, 19, 12, 4, 0, 0, 0], "mei3": [18, 10, 14, 3, 0, 0, 0, 0], "bang3": [7, 6, 19, 12, 3, 0, 0, 0], "zao1": [31, 6, 20, 1, 0, 0, 0, 0], "biao4": [7, 14, 6, 20, 4, 0, 0, 0], "mu2": [18, 26, 2, 0, 0, 0, 0, 0], "heng4": [13, 10, 19, 12, 4, 0, 0, 0], "huan1": [13, 26, 6, 19, 1, 0, 0, 0], "ê2": [0, 0, 0, 0, 0, 0, 0, 0], "ei2": [10, 14, 2, 0, 0, 0, 0, 0], "ê3": [0, 0, 0, 0, 0, 0, 0, 0], "ei3": [10, 14, 3, 0, 0, 0, 0, 0], "ê4": [0, 0, 0, 0, 0, 0, 0, 0], "ei4": [10, 14, 4, 0, 0, 0, 0, 0], "ê1": 
[0, 0, 0, 0, 0, 0, 0, 0], "ei1": [10, 14, 1, 0, 0, 0, 0, 0], "qin4": [22, 14, 19, 4, 0, 0, 0, 0], "ci3": [8, 14, 3, 0, 0, 0, 0, 0], "si3": [24, 14, 3, 0, 0, 0, 0, 0], "du2": [9, 26, 2, 0, 0, 0, 0, 0], "shui3": [24, 13, 26, 14, 3, 0, 0, 0], "ting4": [25, 14, 19, 12, 4, 0, 0, 0], "cuan1": [8, 26, 6, 19, 1, 0, 0, 0], "que1": [22, 26, 10, 1, 0, 0, 0, 0], "xue4": [29, 26, 10, 4, 0, 0, 0, 0], "pen2": [21, 10, 19, 2, 0, 0, 0, 0], "fa3": [11, 6, 3, 0, 0, 0, 0, 0], "feng3": [11, 10, 19, 12, 3, 0, 0, 0], "pao4": [21, 6, 20, 4, 0, 0, 0, 0], "beng4": [7, 10, 19, 12, 4, 0, 0, 0], "sen3": [24, 10, 19, 3, 0, 0, 0, 0], "cui3": [8, 26, 14, 3, 0, 0, 0, 0], "guo1": [12, 26, 20, 1, 0, 0, 0, 0], "lang4": [17, 6, 19, 12, 4, 0, 0, 0], "hai3": [13, 6, 14, 3, 0, 0, 0, 0], "run4": [23, 26, 19, 4, 0, 0, 0, 0], "pou2": [21, 20, 26, 2, 0, 0, 0, 0], "nao4": [19, 6, 20, 4, 0, 0, 0, 0], "shuang4": [24, 13, 26, 6, 19, 12, 4, 0], "lun3": [17, 26, 19, 3, 0, 0, 0, 0], "tian4": [25, 14, 6, 19, 4, 0, 0, 0], "tuan1": [25, 26, 6, 19, 1, 0, 0, 0], "tang4": [25, 6, 19, 12, 4, 0, 0, 0], "man3": [18, 6, 19, 3, 0, 0, 0, 0], "liu1": [17, 14, 26, 1, 0, 0, 0, 0], "ming3": [18, 14, 19, 12, 3, 0, 0, 0], "chou4": [8, 13, 20, 26, 4, 0, 0, 0], "mie4": [18, 14, 10, 4, 0, 0, 0, 0], "teng2": [25, 10, 19, 12, 2, 0, 0, 0], "lan4": [17, 6, 19, 4, 0, 0, 0, 0], "piao3": [21, 14, 6, 20, 3, 0, 0, 0], "lou4": [17, 20, 26, 4, 0, 0, 0, 0], "cao4": [8, 6, 20, 4, 0, 0, 0, 0], "ruan3": [23, 26, 6, 19, 3, 0, 0, 0], "nuan2": [19, 26, 6, 19, 2, 0, 0, 0], "chen4": [8, 13, 10, 19, 4, 0, 0, 0], "ran2": [23, 6, 19, 2, 0, 0, 0, 0], "xiong2": [29, 14, 20, 19, 12, 2, 0, 0], "shou2": [24, 13, 20, 26, 2, 0, 0, 0], "zhua3": [31, 13, 26, 6, 3, 0, 0, 0], "shuang3": [24, 13, 26, 6, 19, 12, 3, 0], "pian4": [21, 14, 6, 19, 4, 0, 0, 0], "niu2": [19, 14, 26, 2, 0, 0, 0, 0], "hang3": [13, 6, 19, 12, 3, 0, 0, 0], "cai1": [8, 6, 14, 1, 0, 0, 0, 0], "mao1": [18, 6, 20, 1, 0, 0, 0, 0], "wa4": [28, 6, 4, 0, 0, 0, 0, 0], "su1": [24, 26, 1, 0, 0, 0, 0, 0], 
"shuai3": [24, 13, 26, 6, 14, 3, 0, 0], "beng2": [7, 10, 19, 12, 2, 0, 0, 0], "dang": [9, 6, 19, 12, 5, 0, 0, 0], "yuan3": [30, 26, 6, 19, 3, 0, 0, 0], "nve4": [19, 27, 10, 4, 0, 0, 0, 0], "suan1": [24, 26, 6, 19, 1, 0, 0, 0], "tuan3": [25, 26, 6, 19, 3, 0, 0, 0], "fei2": [11, 10, 14, 2, 0, 0, 0, 0], "bie3": [7, 14, 10, 3, 0, 0, 0, 0], "que2": [22, 26, 10, 2, 0, 0, 0, 0], "bai2": [7, 6, 14, 2, 0, 0, 0, 0], "ang4": [6, 19, 12, 4, 0, 0, 0, 0], "dun3": [9, 26, 19, 3, 0, 0, 0, 0], "xing3": [29, 14, 19, 12, 3, 0, 0, 0], "zhe": [31, 13, 10, 5, 0, 0, 0, 0], "zhao2": [31, 13, 6, 20, 2, 0, 0, 0], "shui4": [24, 13, 26, 14, 4, 0, 0, 0], "meng1": [18, 10, 19, 12, 1, 0, 0, 0], "duan3": [9, 26, 6, 19, 3, 0, 0, 0], "che1": [8, 13, 10, 1, 0, 0, 0, 0], "tui4": [25, 26, 14, 4, 0, 0, 0, 0], "tuan4": [25, 26, 6, 19, 4, 0, 0, 0], "qiong1": [22, 14, 20, 19, 12, 1, 0, 0], "zhai3": [31, 13, 6, 14, 3, 0, 0, 0], "cuan4": [8, 26, 6, 19, 4, 0, 0, 0], "neng2": [19, 10, 19, 12, 2, 0, 0, 0], "duan1": [9, 26, 6, 19, 1, 0, 0, 0], "ti": [25, 14, 5, 0, 0, 0, 0, 0], "deng3": [9, 10, 19, 12, 3, 0, 0, 0], "shai1": [24, 13, 6, 14, 1, 0, 0, 0], "zan1": [31, 6, 19, 1, 0, 0, 0, 0], "cu1": [8, 26, 1, 0, 0, 0, 0, 0], "qiu3": [22, 14, 26, 3, 0, 0, 0, 0], "gei3": [12, 10, 14, 3, 0, 0, 0, 0], "sui2": [24, 26, 14, 2, 0, 0, 0, 0], "rui2": [23, 26, 14, 2, 0, 0, 0, 0], "huan3": [13, 26, 6, 19, 3, 0, 0, 0], "sheng2": [24, 13, 10, 19, 12, 2, 0, 0], "miu4": [18, 14, 26, 4, 0, 0, 0, 0], "zuan3": [31, 26, 6, 19, 3, 0, 0, 0], "qun2": [22, 26, 19, 2, 0, 0, 0, 0], "shua3": [24, 13, 26, 6, 3, 0, 0, 0], "nou4": [19, 20, 26, 4, 0, 0, 0, 0], "ping4": [21, 14, 19, 12, 4, 0, 0, 0], "rou4": [23, 20, 26, 4, 0, 0, 0, 0], "ang1": [6, 19, 12, 1, 0, 0, 0, 0], "pang4": [21, 6, 19, 12, 4, 0, 0, 0], "nai2": [19, 6, 14, 2, 0, 0, 0, 0], "tui3": [25, 26, 14, 3, 0, 0, 0, 0], "pang3": [21, 6, 19, 12, 3, 0, 0, 0], "cang2": [8, 6, 19, 12, 2, 0, 0, 0], "gen3": [12, 10, 19, 3, 0, 0, 0, 0], "shai3": [24, 13, 6, 14, 3, 0, 0, 0], "cao3": [8, 6, 20, 
3, 0, 0, 0, 0], "zou1": [31, 20, 26, 1, 0, 0, 0, 0], "re2": [23, 10, 2, 0, 0, 0, 0, 0], "ku3": [16, 26, 3, 0, 0, 0, 0, 0], "rong1": [23, 20, 19, 12, 1, 0, 0, 0], "bi2": [7, 14, 2, 0, 0, 0, 0, 0], "cai4": [8, 6, 14, 4, 0, 0, 0, 0], "cang3": [8, 6, 19, 12, 3, 0, 0, 0], "hao1": [13, 6, 20, 1, 0, 0, 0, 0], "xu": [29, 26, 5, 0, 0, 0, 0, 0], "rui3": [23, 26, 14, 3, 0, 0, 0, 0], "ha2": [13, 6, 2, 0, 0, 0, 0, 0], "niao3": [19, 14, 6, 20, 3, 0, 0, 0], "shang": [24, 13, 6, 19, 12, 5, 0, 0], "tun4": [25, 26, 19, 4, 0, 0, 0, 0], "shuo1": [24, 13, 26, 20, 1, 0, 0, 0], "shui2": [24, 13, 26, 14, 2, 0, 0, 0], "shei2": [24, 13, 10, 14, 2, 0, 0, 0], "tou3": [25, 20, 26, 3, 0, 0, 0, 0], "zei2": [31, 10, 14, 2, 0, 0, 0, 0], "zou3": [31, 20, 26, 3, 0, 0, 0, 0], "cou3": [8, 20, 26, 3, 0, 0, 0, 0], "pao3": [21, 6, 20, 3, 0, 0, 0, 0], "zhuai3": [31, 13, 26, 6, 14, 3, 0, 0], "rou3": [23, 20, 26, 3, 0, 0, 0, 0], "ceng4": [8, 10, 19, 12, 4, 0, 0, 0], "zun2": [31, 26, 19, 2, 0, 0, 0, 0], "qun3": [22, 26, 19, 3, 0, 0, 0, 0], "jiu": [15, 14, 26, 5, 0, 0, 0, 0], "jue3": [15, 26, 10, 3, 0, 0, 0, 0], "zhuan3": [31, 13, 26, 6, 19, 3, 0, 0], "bian": [7, 14, 6, 19, 5, 0, 0, 0], "zhei4": [31, 13, 10, 14, 4, 0, 0, 0], "tou4": [25, 20, 26, 4, 0, 0, 0, 0], "qun1": [22, 26, 19, 1, 0, 0, 0, 0], "guo": [12, 26, 20, 5, 0, 0, 0, 0], "niang4": [19, 14, 6, 19, 12, 4, 0, 0], "pan3": [21, 6, 19, 3, 0, 0, 0, 0], "chuang3": [8, 13, 26, 6, 19, 12, 3, 0], "long1": [17, 20, 19, 12, 1, 0, 0, 0], "xue3": [29, 26, 10, 3, 0, 0, 0, 0], "zhun4": [31, 13, 26, 19, 4, 0, 0, 0], "gu2": [12, 26, 2, 0, 0, 0, 0, 0], "zang3": [31, 6, 19, 12, 3, 0, 0, 0], "sui3": [24, 26, 14, 3, 0, 0, 0, 0], "song2": [24, 20, 19, 12, 2, 0, 0, 0], "hou1": [13, 20, 26, 1, 0, 0, 0, 0], "tai3": [25, 6, 14, 3, 0, 0, 0, 0], "jiang": [15, 14, 6, 19, 12, 5, 0, 0], "shui": [24, 13, 26, 14, 5, 0, 0, 0], "pou4": [21, 20, 26, 4, 0, 0, 0, 0], "zun3": [31, 26, 19, 3, 0, 0, 0, 0], "mou1": [18, 20, 26, 1, 0, 0, 0, 0], "pian3": [21, 14, 6, 19, 3, 0, 0, 0], "nang3": 
[19, 6, 19, 12, 3, 0, 0, 0], "xin3": [29, 14, 19, 3, 0, 0, 0, 0], "xiong3": [29, 14, 20, 19, 12, 3, 0, 0], "mang1": [18, 6, 19, 12, 1, 0, 0, 0], "chai3": [8, 13, 6, 14, 3, 0, 0, 0], "den4": [9, 10, 19, 4, 0, 0, 0, 0], "chen": [8, 13, 10, 19, 5, 0, 0, 0], "an2": [6, 19, 2, 0, 0, 0, 0, 0], "pei3": [21, 10, 14, 3, 0, 0, 0, 0], "kuai3": [16, 26, 6, 14, 3, 0, 0, 0], "sun4": [24, 26, 19, 4, 0, 0, 0, 0], "nuo3": [19, 26, 20, 3, 0, 0, 0, 0], "hun3": [13, 26, 19, 3, 0, 0, 0, 0], "kuang3": [16, 26, 6, 19, 12, 3, 0, 0], "nie2": [19, 14, 10, 2, 0, 0, 0, 0], "fou2": [11, 20, 26, 2, 0, 0, 0, 0], "qiu4": [22, 14, 26, 4, 0, 0, 0, 0]}
config/pinyin_map.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"idx2char": ["0", "1", "2", "3", "4", "5", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"], "char2idx": {"0": 0, "1": 1, "2": 2, "3": 3, "4": 4, "5": 5, "a": 6, "b": 7, "c": 8, "d": 9, "e": 10, "f": 11, "g": 12, "h": 13, "i": 14, "j": 15, "k": 16, "l": 17, "m": 18, "n": 19, "o": 20, "p": 21, "q": 22, "r": 23, "s": 24, "t": 25, "u": 26, "v": 27, "w": 28, "x": 29, "y": 30, "z": 31}}
config/方正古隶繁体.ttf24.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9b354344e73ed5afc3d9a922d9cef2fc6f53bfff17a5876b3ffb74e74df02b4f
3
+ size 107071616
modeling_glycebert.py ADDED
@@ -0,0 +1,532 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # -*- coding: utf-8 -*-
3
+ """
4
+ @file : modeling_glycebert.py
5
+ @author: zijun
6
+ @contact : zijun_sun@shannonai.com
7
+ @date : 2020/9/6 18:50
8
+ @version: 1.0
9
+ @desc : ChineseBert Model
10
+ """
11
+ import warnings
12
+
13
+ import torch
14
+ from torch import nn
15
+ from torch.nn import CrossEntropyLoss, MSELoss
16
+ try:
17
+ from transformers.modeling_bert import BertEncoder, BertPooler, BertOnlyMLMHead, BertPreTrainedModel, BertModel
18
+ except:
19
+ from transformers.models.bert.modeling_bert import BertEncoder, BertPooler, BertOnlyMLMHead, BertPreTrainedModel, BertModel
20
+
21
+ from transformers.modeling_outputs import BaseModelOutputWithPooling, MaskedLMOutput, SequenceClassifierOutput, \
22
+ QuestionAnsweringModelOutput, TokenClassifierOutput
23
+
24
+ from models.fusion_embedding import FusionBertEmbeddings
25
+ from models.classifier import BertMLP
26
+
27
class GlyceBertModel(BertModel):
    """ChineseBERT backbone.

    A standard BERT encoder whose word-embedding layer is replaced by
    :class:`FusionBertEmbeddings`, which fuses character, glyph and pinyin
    features; ``forward`` therefore takes an extra ``pinyin_ids`` tensor
    alongside the usual BERT inputs.

    Depending on ``return_dict`` the model returns either a plain tuple
    ``(sequence_output, pooled_output, *encoder_extras)`` or a
    :class:`BaseModelOutputWithPooling` with ``last_hidden_state``,
    ``pooler_output`` and the optional ``hidden_states``/``attentions``.
    """

    def __init__(self, config):
        super(GlyceBertModel, self).__init__(config)
        self.config = config

        # Only the embedding layer differs from stock BERT; encoder and
        # pooler are the standard transformers modules.
        self.embeddings = FusionBertEmbeddings(config)
        self.encoder = BertEncoder(config)
        self.pooler = BertPooler(config)

        self.init_weights()

    def forward(
        self,
        input_ids=None,
        pinyin_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        """Run the fused embedding layer followed by the BERT encoder.

        ``pinyin_ids`` is forwarded untouched to :class:`FusionBertEmbeddings`
        (presumably shaped to align one pinyin sequence per input token —
        TODO confirm against the embedding implementation).
        ``encoder_hidden_states`` / ``encoder_attention_mask`` are only used
        for cross-attention when ``config.is_decoder`` is set.
        """
        # Fall back to the config defaults for any unspecified output flags.
        if output_attentions is None:
            output_attentions = self.config.output_attentions
        if output_hidden_states is None:
            output_hidden_states = self.config.output_hidden_states
        if return_dict is None:
            return_dict = self.config.use_return_dict

        # Exactly one of input_ids / inputs_embeds must be supplied.
        if input_ids is not None:
            if inputs_embeds is not None:
                raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
            input_shape = input_ids.size()
            device = input_ids.device
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
            device = inputs_embeds.device
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")

        # Default masks: attend everywhere, single segment.
        if attention_mask is None:
            attention_mask = torch.ones(input_shape, device=device)
        if token_type_ids is None:
            token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)

        # Broadcast the [batch, seq] (or [batch, from, to]) mask to all heads.
        ext_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, device)

        # Cross-attention mask is only relevant in decoder mode.
        if self.config.is_decoder and encoder_hidden_states is not None:
            enc_batch, enc_seq_len, _ = encoder_hidden_states.size()
            if encoder_attention_mask is None:
                encoder_attention_mask = torch.ones((enc_batch, enc_seq_len), device=device)
            enc_ext_attention_mask = self.invert_attention_mask(encoder_attention_mask)
        else:
            enc_ext_attention_mask = None

        # Expand head_mask to [num_layers x batch x heads x seq x seq] (or Nones).
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        embedding_output = self.embeddings(
            input_ids=input_ids,
            pinyin_ids=pinyin_ids,
            position_ids=position_ids,
            token_type_ids=token_type_ids,
            inputs_embeds=inputs_embeds,
        )
        encoder_outputs = self.encoder(
            embedding_output,
            attention_mask=ext_attention_mask,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=enc_ext_attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]
        pooled_output = None if self.pooler is None else self.pooler(sequence_output)

        if return_dict:
            return BaseModelOutputWithPooling(
                last_hidden_state=sequence_output,
                pooler_output=pooled_output,
                hidden_states=encoder_outputs.hidden_states,
                attentions=encoder_outputs.attentions,
            )
        return (sequence_output, pooled_output) + encoder_outputs[1:]
+ )
164
+
165
+
166
class GlyceBertForMaskedLM(BertPreTrainedModel):
    """ChineseBERT with a masked-language-modeling head on top.

    Wraps :class:`GlyceBertModel` (which consumes ``pinyin_ids`` in addition
    to the usual BERT inputs) and the standard ``BertOnlyMLMHead`` decoder.
    """

    def __init__(self, config):
        super(GlyceBertForMaskedLM, self).__init__(config)

        self.bert = GlyceBertModel(config)
        self.cls = BertOnlyMLMHead(config)

        self.init_weights()

    def get_output_embeddings(self):
        return self.cls.predictions.decoder

    def set_output_embeddings(self, new_embeddings):
        # Counterpart to get_output_embeddings(): without it,
        # resize_token_embeddings()/weight re-tying in transformers cannot
        # update the MLM decoder.
        self.cls.predictions.decoder = new_embeddings

    def forward(
        self,
        input_ids=None,
        pinyin_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        **kwargs
    ):
        r"""
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Labels for computing the masked language modeling loss.
            Indices should be in ``[-100, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring)
            Tokens with indices set to ``-100`` are ignored (masked), the loss is only computed for the tokens with labels
            in ``[0, ..., config.vocab_size]``
        kwargs (:obj:`Dict[str, any]`, optional, defaults to `{}`):
            Used to hide legacy arguments that have been deprecated.
        """
        # Legacy argument handling. Note: the original used `assert` here,
        # which is silently stripped under `python -O`; explicit raises keep
        # the validation active in optimized mode.
        if "masked_lm_labels" in kwargs:
            warnings.warn(
                "The `masked_lm_labels` argument is deprecated and will be removed in a future version, use `labels` instead.",
                FutureWarning,
            )
            labels = kwargs.pop("masked_lm_labels")
        if "lm_labels" in kwargs:
            raise ValueError("Use `BertWithLMHead` for autoregressive language modeling task.")
        if kwargs:
            raise TypeError(f"Unexpected keyword arguments: {list(kwargs.keys())}.")

        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.bert(
            input_ids,
            pinyin_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = outputs[0]
        prediction_scores = self.cls(sequence_output)

        masked_lm_loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()  # -100 index = padding token
            masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))

        if not return_dict:
            output = (prediction_scores,) + outputs[2:]
            return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output

        return MaskedLMOutput(
            loss=masked_lm_loss,
            logits=prediction_scores,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
+ )
248
+
249
+
250
class GlyceBertForSequenceClassification(BertPreTrainedModel):
    """ChineseBERT with a sequence-classification head (linear layer on the
    pooled output).

    With ``config.num_labels == 1`` the head is trained as a regressor
    (MSE loss); otherwise as a classifier (cross-entropy).
    """

    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels

        self.bert = GlyceBertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)

        self.init_weights()

    def forward(
        self,
        input_ids=None,
        pinyin_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for computing the sequence classification/regression loss.
            Indices should be in :obj:`[0, ..., config.num_labels - 1]`.
            If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss),
            If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        if return_dict is None:
            return_dict = self.config.use_return_dict

        outputs = self.bert(
            input_ids,
            pinyin_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        # Classify from the pooled [CLS] representation.
        logits = self.classifier(self.dropout(outputs[1]))

        if labels is None:
            loss = None
        elif self.num_labels == 1:
            # Single label: treat as regression.
            loss = MSELoss()(logits.view(-1), labels.view(-1))
        else:
            loss = CrossEntropyLoss()(logits.view(-1, self.num_labels), labels.view(-1))

        if return_dict:
            return SequenceClassifierOutput(
                loss=loss,
                logits=logits,
                hidden_states=outputs.hidden_states,
                attentions=outputs.attentions,
            )

        output = (logits,) + outputs[2:]
        return output if loss is None else ((loss,) + output)
+ )
322
+
323
+
324
class GlyceBertForQuestionAnswering(BertPreTrainedModel):
    """ChineseBERT model with a span-extraction question-answering head.

    A single linear layer (``qa_outputs``) projects every token's final
    hidden state to two logits: span start and span end. When
    ``start_positions`` and ``end_positions`` are supplied, the loss is the
    mean of the start and end cross-entropy losses; positions outside the
    sequence are clamped to an ignored index and do not contribute.

    Returns ``(start_logits, end_logits)`` of shape
    ``[batch_size, sequence_length]`` (plus the loss when labels are given),
    either as a tuple or as a :class:`QuestionAnsweringModelOutput`.
    """

    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels

        self.bert = GlyceBertModel(config)
        self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels)

        self.init_weights()

    def forward(
        self,
        input_ids=None,
        pinyin_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        start_positions=None,
        end_positions=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        start_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Index of the first token of the labelled span. Clamped to the
            sequence length; out-of-sequence positions are ignored in the loss.
        end_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Index of the last token of the labelled span. Clamped to the
            sequence length; out-of-sequence positions are ignored in the loss.
        """
        if return_dict is None:
            return_dict = self.config.use_return_dict

        encoder_outputs = self.bert(
            input_ids,
            pinyin_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        hidden = encoder_outputs[0]
        # Project to two logits per token, then separate start/end channels.
        start_logits, end_logits = self.qa_outputs(hidden).split(1, dim=-1)
        start_logits = start_logits.squeeze(-1)
        end_logits = end_logits.squeeze(-1)

        total_loss = None
        if start_positions is not None and end_positions is not None:
            # Multi-GPU gathering can add a trailing dimension; drop it.
            if start_positions.dim() > 1:
                start_positions = start_positions.squeeze(-1)
            if end_positions.dim() > 1:
                end_positions = end_positions.squeeze(-1)
            # Targets outside the model input are clamped to an index that
            # CrossEntropyLoss is told to ignore.
            ignored_index = start_logits.size(1)
            start_positions.clamp_(0, ignored_index)
            end_positions.clamp_(0, ignored_index)

            loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
            total_loss = (loss_fct(start_logits, start_positions)
                          + loss_fct(end_logits, end_positions)) / 2

        if not return_dict:
            output = (start_logits, end_logits) + encoder_outputs[2:]
            if total_loss is not None:
                return (total_loss,) + output
            return output

        return QuestionAnsweringModelOutput(
            loss=total_loss,
            start_logits=start_logits,
            end_logits=end_logits,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )
456
+
457
class GlyceBertForTokenClassification(BertPreTrainedModel):
    """ChineseBERT model with a per-token classification head (sequence
    tagging tasks such as NER).

    When ``mlp`` is true the head is a ``BertMLP``; otherwise a single
    linear layer over each token's final hidden state.
    """

    def __init__(self, config, mlp=False):
        super().__init__(config)
        self.num_labels = config.num_labels

        self.bert = GlyceBertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.classifier = BertMLP(config) if mlp else nn.Linear(config.hidden_size, config.num_labels)

        self.init_weights()

    def forward(self,
                input_ids=None,
                pinyin_ids=None,
                attention_mask=None,
                token_type_ids=None,
                position_ids=None,
                head_mask=None,
                inputs_embeds=None,
                labels=None,
                output_attentions=None,
                output_hidden_states=None,
                return_dict=None,
                ):
        r"""
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Labels for computing the token classification loss.
            Indices should be in :obj:`[0, ..., config.num_labels - 1]`.
        """
        if return_dict is None:
            return_dict = self.config.use_return_dict

        encoder_outputs = self.bert(
            input_ids,
            pinyin_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        token_states = self.dropout(encoder_outputs[0])
        logits = self.classifier(token_states)

        loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()
            flat_logits = logits.view(-1, self.num_labels)
            flat_labels = labels.view(-1)
            if attention_mask is None:
                loss = loss_fct(flat_logits, flat_labels)
            else:
                # Replace labels at padded positions with the ignore index so
                # that only real tokens contribute to the loss.
                keep = attention_mask.view(-1) == 1
                ignore = torch.tensor(loss_fct.ignore_index).type_as(labels)
                loss = loss_fct(flat_logits, torch.where(keep, flat_labels, ignore))

        if not return_dict:
            output = (logits,) + encoder_outputs[2:]
            if loss is not None:
                return (loss,) + output
            return output

        return TokenClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:80233c57feb3b520febca3b5673565b181d46166578b16a9366ec0d32d31e463
3
+ size 591885271
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
7
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json
import os
from typing import List

import tokenizers
import torch
from pypinyin import pinyin, Style

try:
    from tokenizers import BertWordPieceTokenizer
except ImportError:
    # Newer `tokenizers` releases expose the class under `implementations`.
    # Catch only ImportError: a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit and hide unrelated failures.
    from tokenizers.implementations import BertWordPieceTokenizer

from transformers import BertTokenizerFast
15
+
16
+
17
class ChineseBertTokenizer(BertTokenizerFast):
    """BERT tokenizer that additionally produces pinyin-feature ids for ChineseBERT.

    Every token is mapped to a fixed-length (8) list of pinyin-character ids.
    Tokens that span more or fewer than one source character, and characters
    that are not Chinese, receive an all-zero list.
    """

    def __init__(self, **kwargs):
        super(ChineseBertTokenizer, self).__init__(**kwargs)

        # assumes the model directory layout of this repo:
        #   <name_or_path>/vocab.txt and <name_or_path>/config/*.json
        bert_path = self.name_or_path
        vocab_file = os.path.join(bert_path, 'vocab.txt')
        config_path = os.path.join(bert_path, 'config')
        self.max_length = 512  # hard upper bound on encoded sequence length
        self.tokenizer = BertWordPieceTokenizer(vocab_file)

        # pinyin character -> index map (under key "char2idx")
        with open(os.path.join(config_path, 'pinyin_map.json'), encoding='utf8') as fin:
            self.pinyin_dict = json.load(fin)
        # char id -> pinyin ids map (kept for compatibility; not used below)
        with open(os.path.join(config_path, 'id2pinyin.json'), encoding='utf8') as fin:
            self.id2pinyin = json.load(fin)
        # pinyin string (TONE3, e.g. "ling2") -> precomputed 8-id list
        with open(os.path.join(config_path, 'pinyin2tensor.json'), encoding='utf8') as fin:
            self.pinyin2tensor = json.load(fin)

    def tokenize_sentence(self, sentence):
        """Encode *sentence* into ``(input_ids, pinyin_ids)`` LongTensors.

        ``pinyin_ids`` is flattened, i.e. 8 consecutive entries per token.
        """
        tokenizer_output = self.tokenizer.encode(sentence)
        bert_tokens = tokenizer_output.ids
        pinyin_tokens = self.convert_sentence_to_pinyin_ids(sentence, tokenizer_output)
        # token count must fit the model limit and match the pinyin-token count
        assert len(bert_tokens) <= self.max_length
        assert len(bert_tokens) == len(pinyin_tokens)
        input_ids = torch.LongTensor(bert_tokens)
        pinyin_ids = torch.LongTensor(pinyin_tokens).view(-1)
        return input_ids, pinyin_ids

    def convert_sentence_to_pinyin_ids(self, sentence: str, tokenizer_output: tokenizers.Encoding) -> List[List[int]]:
        """Return one 8-element pinyin-id list per token of *tokenizer_output*."""
        # pypinyin yields one reading list per character; non-Chinese characters
        # are mapped to the "not chinese" marker via the errors callback.
        pinyin_list = pinyin(sentence, style=Style.TONE3, heteronym=True, errors=lambda x: [['not chinese'] for _ in x])
        pinyin_locs = {}
        # Character position -> 8-id pinyin encoding (Chinese characters only).
        for index, item in enumerate(pinyin_list):
            pinyin_string = item[0]
            if pinyin_string == "not chinese":
                continue
            if pinyin_string in self.pinyin2tensor:
                pinyin_locs[index] = self.pinyin2tensor[pinyin_string]
            else:
                # Fall back to encoding the pinyin string character by
                # character; any unknown character voids the whole encoding.
                ids = [0] * 8
                for i, p in enumerate(pinyin_string):
                    if p not in self.pinyin_dict["char2idx"]:
                        ids = [0] * 8
                        break
                    ids[i] = self.pinyin_dict["char2idx"][p]
                pinyin_locs[index] = ids

        # Map tokens back to source offsets. Only single-character tokens
        # (individual Chinese characters) can carry pinyin features.
        # (Idiom fix: original enumerated tokens/indices it never used.)
        pinyin_ids = []
        for offset in tokenizer_output.offsets:
            if offset[1] - offset[0] == 1 and offset[0] in pinyin_locs:
                pinyin_ids.append(pinyin_locs[offset[0]])
            else:
                pinyin_ids.append([0] * 8)

        return pinyin_ids
tokenizer_config.json ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "auto_map": {
3
+ "AutoTokenizer": [
4
+ "tokenizer.ChineseBertTokenizer",
5
+ null
6
+ ]
7
+ },
8
+ "cls_token": "[CLS]",
9
+ "do_basic_tokenize": true,
10
+ "do_lower_case": true,
11
+ "mask_token": "[MASK]",
12
+ "model_max_length": 1000000000000000019884624838656,
13
+ "never_split": null,
14
+ "pad_token": "[PAD]",
15
+ "sep_token": "[SEP]",
16
+ "special_tokens_map_file": null,
17
+ "strip_accents": null,
18
+ "tokenize_chinese_chars": true,
19
+ "tokenizer_class": "ChineseBertTokenizer",
20
+ "unk_token": "[UNK]"
21
+ }
vocab.txt ADDED
The diff for this file is too large to render. See raw diff