uer committed on
Commit c03c9c0
Parent: 85c8ab4

Update README.md

Files changed (1):
  README.md +21 -0
README.md CHANGED
@@ -161,6 +161,27 @@ python3 scripts/convert_bert_from_uer_to_huggingface.py --input_model_path model
 ### BibTeX entry and citation info
 
 ```
+@article{devlin2018bert,
+  title={Bert: Pre-training of deep bidirectional transformers for language understanding},
+  author={Devlin, Jacob and Chang, Ming-Wei and Lee, Kenton and Toutanova, Kristina},
+  journal={arXiv preprint arXiv:1810.04805},
+  year={2018}
+}
+
+@article{liu2019roberta,
+  title={Roberta: A robustly optimized bert pretraining approach},
+  author={Liu, Yinhan and Ott, Myle and Goyal, Naman and Du, Jingfei and Joshi, Mandar and Chen, Danqi and Levy, Omer and Lewis, Mike and Zettlemoyer, Luke and Stoyanov, Veselin},
+  journal={arXiv preprint arXiv:1907.11692},
+  year={2019}
+}
+
+@article{turc2019,
+  title={Well-Read Students Learn Better: On the Importance of Pre-training Compact Models},
+  author={Turc, Iulia and Chang, Ming-Wei and Lee, Kenton and Toutanova, Kristina},
+  journal={arXiv preprint arXiv:1908.08962v2},
+  year={2019}
+}
+
 @article{zhao2019uer,
   title={UER: An Open-Source Toolkit for Pre-training Models},
   author={Zhao, Zhe and Chen, Hui and Zhang, Jinbin and Zhao, Xin and Liu, Tao and Lu, Wei and Chen, Xi and Deng, Haotang and Ju, Qi and Du, Xiaoyong},
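
For reference, a minimal LaTeX sketch of how the entries added in this diff can be cited, assuming they are saved to a hypothetical `references.bib` file:

```latex
\documentclass{article}
\begin{document}

% Cite the papers whose BibTeX entries this commit adds to the README,
% alongside the existing UER toolkit entry.
This model was trained with UER~\cite{zhao2019uer}, following
BERT~\cite{devlin2018bert}, RoBERTa~\cite{liu2019roberta}, and the
compact-model recipe of~\cite{turc2019}.

% The entries from the README are assumed to live in references.bib
% (hypothetical filename); compile with pdflatex, then bibtex, then
% pdflatex twice to resolve the citations.
\bibliographystyle{plain}
\bibliography{references}

\end{document}
```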