update citation
Browse files
README.md
CHANGED
@@ -17,6 +17,8 @@ This model is released under the [Creative Commons 4.0 International License](ht
|
|
17 |
|
18 |
#### Reference
|
19 |
|
|
|
|
|
20 |
```
|
21 |
@InProceedings{sugimoto_nlp2023_jmedroberta,
|
22 |
author = "杉本海人 and 壹岐太一 and 知田悠生 and 金沢輝一 and 相澤彰子",
|
@@ -27,6 +29,18 @@ This model is released under the [Creative Commons 4.0 International License](ht
|
|
27 |
}
|
28 |
```
|
29 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
30 |
## Datasets used for pre-training
|
31 |
|
32 |
- abstracts (train: 1.6GB (10M sentences), validation: 0.2GB (1.3M sentences))
|
|
|
17 |
|
18 |
#### Reference
|
19 |
|
20 |
+ Ja:
+
|
22 |
```
|
23 |
@InProceedings{sugimoto_nlp2023_jmedroberta,
|
24 |
author = "杉本海人 and 壹岐太一 and 知田悠生 and 金沢輝一 and 相澤彰子",
|
|
|
29 |
}
|
30 |
```
|
31 |
|
32 |
+ En:
+
+ ```
+ @InProceedings{sugimoto_nlp2023_jmedroberta,
+     author = "Sugimoto, Kaito and Iki, Taichi and Chida, Yuki and Kanazawa, Teruhito and Aizawa, Akiko",
+     title = "J{M}ed{R}o{BERT}a: a Japanese Pre-trained Language Model on Academic Articles in Medical Sciences (in Japanese)",
+     booktitle = "Proceedings of the 29th Annual Meeting of the Association for Natural Language Processing",
+     year = "2023",
+     url = "https://www.anlp.jp/proceedings/annual_meeting/2023/pdf_dir/P3-1.pdf"
+ }
+ ```
+
|
44 |
## Datasets used for pre-training
|
45 |
|
46 |
- abstracts (train: 1.6GB (10M sentences), validation: 0.2GB (1.3M sentences))
|