junnyu commited on
Commit
7acf589
1 Parent(s): ca4f6fd

Create README.md

Browse files
Files changed (1) hide show
  1. README.md +66 -0
README.md ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ language: zh
3
+ tags:
4
+ - gau-alpha
5
+ - paddlepaddle
6
+ inference: False
7
+ ---
8
+
9
+ # pytorch 和 paddle代码
10
+ https://github.com/JunnYu/GAU-alpha-pytorch
11
+
12
+ # bert4keras代码
13
+ https://github.com/ZhuiyiTechnology/GAU-alpha
14
+
15
+ # Install
16
+ ```bash
17
+ 进入https://github.com/JunnYu/GAU-alpha-pytorch,
18
+ 下载paddle代码gau_alpha_paddle
19
+ ```
20
+
21
+ # Usage
22
+ ```python
23
+ import paddle
24
+ from transformers import BertTokenizer as GAUAlphaTokenizer
25
+ from gau_alpha_paddle import GAUAlphaForMaskedLM
26
+
27
+ text = "今天[MASK]很好,我[MASK]去公园玩。"
28
+ tokenizer = GAUAlphaTokenizer.from_pretrained(
29
+     "junnyu/chinese_GAU-alpha-char_L-24_H-768"
30
+ )
31
+ pd_model = GAUAlphaForMaskedLM.from_pretrained("junnyu/chinese_GAU-alpha-char_L-24_H-768")
32
+ pd_model.eval()
33
+ pd_inputs = tokenizer(text)
34
+ pd_inputs = {k: paddle.to_tensor([v]) for k, v in pd_inputs.items()}
35
+
36
+ with paddle.no_grad():
37
+     pd_outputs = pd_model(**pd_inputs)[0][0]
38
+
39
+ pd_outputs_sentence = "paddle: "
40
+ for i, id in enumerate(tokenizer.encode(text)):
41
+     if id == tokenizer.mask_token_id:
42
+         val, idx = paddle.nn.functional.softmax(pd_outputs[i], -1).topk(k=5)
43
+         tokens = tokenizer.convert_ids_to_tokens(idx)
44
+         new_tokens = []
45
+         for v, t in zip(val.cpu(), tokens):
46
+             new_tokens.append(f"{t}+{round(v.item(),4)}")
47
+         pd_outputs_sentence += "[" + "||".join(new_tokens) + "]"
48
+     else:
49
+         pd_outputs_sentence += "".join(
50
+             tokenizer.convert_ids_to_tokens([id], skip_special_tokens=True)
51
+         )
52
+ print(pd_outputs_sentence)
53
+ # paddle: 今天[天+0.8657||气+0.0535||阳+0.0165||,+0.0126||晴+0.0111]很好,我[要+0.4619||想+0.4352||又+0.0252||就+0.0157||跑+0.0064]去公园玩。
54
+ ```
55
+
56
+ # Reference
57
+ Bibtex:
58
+
59
+ ```tex
60
+ @techreport{gau-alpha,
61
+   title={GAU-α: GAU-based Transformers for NLP - ZhuiyiAI},
62
+   author={Jianlin Su and Shengfeng Pan and Bo Wen and Yunfeng Liu},
63
+   year={2022},
64
+   url={https://github.com/ZhuiyiTechnology/GAU-alpha},
65
+ }
66
+ ```