asafaya19 commited on
Commit
143035c
1 Parent(s): 880261f

correct config, update model card

Browse files
Files changed (2) hide show
  1. README.md +13 -17
  2. config.json +1 -1
README.md CHANGED
@@ -12,23 +12,19 @@ Pretrained BERT base language model for Arabic
12
 
13
  _If you use this model in your work, please cite this paper:_
14
 
15
- <!--```
16
- @inproceedings{
17
- title={KUISAIL at SemEval-2020 Task 12: BERT-CNN for Offensive Speech Identification in Social Media},
18
- author={Safaya, Ali and Abdullatif, Moutasem and Yuret, Deniz},
19
- booktitle={Proceedings of the International Workshop on Semantic Evaluation (SemEval)},
20
- year={2020}
21
- }
22
- ```-->
23
-
24
  ```
25
- @misc{safaya2020kuisail,
26
- title={KUISAIL at SemEval-2020 Task 12: BERT-CNN for Offensive Speech Identification in Social Media},
27
- author={Ali Safaya and Moutasem Abdullatif and Deniz Yuret},
28
- year={2020},
29
- eprint={2007.13184},
30
- archivePrefix={arXiv},
31
- primaryClass={cs.CL}
 
 
 
 
 
32
  }
33
  ```
34
 
@@ -60,7 +56,7 @@ You can use this model by installing `torch` or `tensorflow` and Huggingface lib
60
  from transformers import AutoTokenizer, AutoModel
61
 
62
  tokenizer = AutoTokenizer.from_pretrained("asafaya/bert-base-arabic")
63
- model = AutoModel.from_pretrained("asafaya/bert-base-arabic")
64
  ```
65
 
66
  ## Results
12
 
13
  _If you use this model in your work, please cite this paper:_
14
 
 
 
 
 
 
 
 
 
 
15
  ```
16
+ @inproceedings{safaya-etal-2020-kuisail,
17
+ title = "{KUISAIL} at {S}em{E}val-2020 Task 12: {BERT}-{CNN} for Offensive Speech Identification in Social Media",
18
+ author = "Safaya, Ali and
19
+ Abdullatif, Moutasem and
20
+ Yuret, Deniz",
21
+ booktitle = "Proceedings of the Fourteenth Workshop on Semantic Evaluation",
22
+ month = dec,
23
+ year = "2020",
24
+ address = "Barcelona (online)",
25
+ publisher = "International Committee for Computational Linguistics",
26
+ url = "https://www.aclweb.org/anthology/2020.semeval-1.271",
27
+ pages = "2054--2059",
28
  }
29
  ```
30
 
56
  from transformers import AutoTokenizer, AutoModelForMaskedLM
57
 
58
  tokenizer = AutoTokenizer.from_pretrained("asafaya/bert-base-arabic")
59
+ model = AutoModelForMaskedLM.from_pretrained("asafaya/bert-base-arabic")
60
  ```
61
 
62
  ## Results
config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "architectures": [
3
- "BertModel"
4
  ],
5
  "attention_probs_dropout_prob": 0.1,
6
  "gradient_checkpointing": false,
1
  {
2
  "architectures": [
3
+ "BertForMaskedLM"
4
  ],
5
  "attention_probs_dropout_prob": 0.1,
6
  "gradient_checkpointing": false,