asahi417 commited on
Commit
3feb3b7
1 Parent(s): e83660f
Files changed (3) hide show
  1. .gitignore +1 -0
  2. README.md +86 -21
  3. get_model_list.py +71 -30
.gitignore ADDED
@@ -0,0 +1 @@
 
1
+ metric_files
README.md CHANGED
@@ -96,26 +96,6 @@ We ask annotators to ignore those special tokens but label the verified users' m
96
  For the temporal-shift setting, model should be trained on `train_2020` with `validation_2020` and evaluate on `test_2021`.
97
  In general, model would be trained on `train_all`, the most representative training set with `validation_2021` and evaluate on `test_2021`.
98
 
99
-
100
- ### Models
101
-
102
- Following models are fine-tuned on `train_all` and validated on `validation_2021` of `tner/tweetner7`. See full model list [here](https://github.com/asahi417/tner/blob/master/MODEL_CARD.md#models-for-tweetner7).
103
-
104
- | Model (link) | Data | Language Model |
105
- |:--------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------|:----------------------------------------------------------------------------------------|
106
- | [`tner/roberta-large-tweetner7-all`](https://huggingface.co/tner/roberta-large-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) |
107
- | [`tner/roberta-base-tweetner7-all`](https://huggingface.co/tner/roberta-base-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-base`](https://huggingface.co/roberta-base) |
108
- | [`tner/twitter-roberta-base-2019-90m-tweetner7-all`](https://huggingface.co/tner/twitter-roberta-base-2019-90m-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`twitter-roberta-base-2019-90m`](https://huggingface.co/twitter-roberta-base-2019-90m) |
109
- | [`tner/twitter-roberta-base-dec2020-tweetner7-all`](https://huggingface.co/tner/twitter-roberta-base-dec2020-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`twitter-roberta-base-dec2020`](https://huggingface.co/twitter-roberta-base-dec2020) |
110
- | [`tner/twitter-roberta-base-dec2021-tweetner7-all`](https://huggingface.co/tner/twitter-roberta-base-dec2021-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`twitter-roberta-base-dec2021`](https://huggingface.co/twitter-roberta-base-dec2021) |
111
-
112
-
113
-
114
- ### Reproduce Experimental Result
115
-
116
- To reproduce the experimental result on our AACL paper, please see the repository
117
- [https://github.com/asahi417/tner/tree/master/examples/tweetner7_paper](https://github.com/asahi417/tner/tree/master/examples/tweetner7_paper).
118
-
119
  ## Dataset Structure
120
 
121
  ### Data Instances
@@ -153,7 +133,92 @@ The label2id dictionary can be found at [here](https://huggingface.co/datasets/t
153
  ```
154
 
155
 
156
- ### Citation Information
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
157
 
158
  ```
159
  @inproceedings{ushio-etal-2022-tweet,
96
  For the temporal-shift setting, model should be trained on `train_2020` with `validation_2020` and evaluate on `test_2021`.
97
  In general, model would be trained on `train_all`, the most representative training set with `validation_2021` and evaluate on `test_2021`.
98
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
99
  ## Dataset Structure
100
 
101
  ### Data Instances
133
  ```
134
 
135
 
136
+ ## Models
137
+
138
+ | Model (link) | Data | Language Model | Micro F1 (2021) | Macro F1 (2021) | F1 (2021)/corporation | F1 (2021)/creative_work | F1 (2021)/event | F1 (2021)/group | F1 (2021)/location | F1 (2021)/person | F1 (2021)/product | Micro F1 (2020) | Macro F1 (2020) | F1 (2020)/corporation | F1 (2020)/creative_work | F1 (2020)/event | F1 (2020)/group | F1 (2020)/location | F1 (2020)/person | F1 (2020)/product | Entity-Span F1 (2021) | Entity-Span F1 (2020) |
139
+ |:--------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------|------------------:|------------------:|------------------------:|--------------------------:|------------------:|------------------:|---------------------:|-------------------:|--------------------:|------------------:|------------------:|------------------------:|--------------------------:|------------------:|------------------:|---------------------:|-------------------:|--------------------:|------------------------:|------------------------:|
140
+ | [`tner/roberta-large-tweetner7-all`](https://huggingface.co/tner/roberta-large-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 65.75 | 61.25 | 53.92 | 47.61 | 46.73 | 61.4 | 67.07 | 82.93 | 69.06 | 66.29 | 62.97 | 61.84 | 51.59 | 50.29 | 55.99 | 69.23 | 82.01 | 69.86 | 78.82 | 76.43 |
141
+ | [`tner/roberta-base-tweetner7-all`](https://huggingface.co/tner/roberta-base-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-base`](https://huggingface.co/roberta-base) | 65.16 | 60.81 | 51.74 | 46.64 | 46.73 | 60.71 | 68.33 | 83.77 | 67.77 | 65.32 | 61.66 | 61.94 | 48.94 | 45.14 | 56.58 | 68.94 | 82.75 | 67.33 | 78.93 | 75.23 |
142
+ | [`tner/twitter-roberta-base-2019-90m-tweetner7-all`](https://huggingface.co/tner/twitter-roberta-base-2019-90m-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-2019-90m`](https://huggingface.co/cardiffnlp/twitter-roberta-base-2019-90m) | 65.68 | 61 | 50.87 | 47.3 | 48.41 | 61.48 | 67.94 | 83.93 | 67.06 | 65.46 | 61.22 | 56.85 | 52.15 | 46.68 | 56.68 | 65.1 | 84.55 | 66.5 | 78.89 | 76.43 |
143
+ | [`tner/twitter-roberta-base-dec2020-tweetner7-all`](https://huggingface.co/tner/twitter-roberta-base-dec2020-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-dec2020`](https://huggingface.co/cardiffnlp/twitter-roberta-base-dec2020) | 65.26 | 60.7 | 51.53 | 47.6 | 46.69 | 60.93 | 66.89 | 83.87 | 67.38 | 65.44 | 61.39 | 56.76 | 55.06 | 46.24 | 55.52 | 64.26 | 84.87 | 67 | 78.68 | 75.87 |
144
+ | [`tner/bertweet-large-tweetner7-all`](https://huggingface.co/tner/bertweet-large-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-large`](https://huggingface.co/vinai/bertweet-large) | 66.46 | 61.87 | 54.5 | 47.36 | 49.15 | 62.38 | 67.55 | 84.15 | 68.02 | 66.76 | 63.08 | 58.89 | 55.24 | 48.89 | 59.85 | 66.67 | 83.49 | 68.51 | 79.53 | 77.59 |
145
+ | [`tner/bertweet-base-tweetner7-all`](https://huggingface.co/tner/bertweet-base-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-base`](https://huggingface.co/vinai/bertweet-base) | 65.36 | 60.52 | 52.51 | 46.54 | 48.06 | 60.33 | 65.67 | 84.08 | 66.46 | 65.74 | 61.61 | 57.22 | 54.1 | 48.55 | 57.35 | 64.57 | 84.16 | 65.36 | 78.99 | 76.91 |
146
+ | [`tner/bert-large-tweetner7-all`](https://huggingface.co/tner/bert-large-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-large`](https://huggingface.co/bert-large) | 63.58 | 59 | 50.13 | 40.16 | 47 | 59.74 | 67.2 | 81.86 | 66.91 | 62.49 | 58.63 | 55.56 | 47.65 | 43.08 | 54.88 | 63.9 | 80.31 | 65.04 | 77.21 | 73.58 |
147
+ | [`tner/bert-base-tweetner7-all`](https://huggingface.co/tner/bert-base-tweetner7-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-base`](https://huggingface.co/bert-base) | 62.3 | 57.59 | 51.41 | 38.86 | 45.81 | 56.61 | 62.65 | 81.97 | 65.8 | 62.1 | 57.74 | 56.55 | 41.52 | 45.04 | 54.23 | 60.53 | 81.86 | 64.49 | 76.62 | 72.98 |
148
+ | [`tner/roberta-large-tweetner7-continuous`](https://huggingface.co/tner/roberta-large-tweetner7-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 66.02 | 60.9 | 53.15 | 44.42 | 48.79 | 61.15 | 67.41 | 84.72 | 66.63 | 66.26 | 62.4 | 57.75 | 54.14 | 48.48 | 57.52 | 67.69 | 83.33 | 67.84 | 79.14 | 76.44 |
149
+ | [`tner/roberta-base-tweetner7-continuous`](https://huggingface.co/tner/roberta-base-tweetner7-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-base`](https://huggingface.co/roberta-base) | 65.47 | 60.01 | 50.97 | 41.68 | 46.75 | 61.52 | 67.98 | 84.49 | 66.67 | 65.15 | 60.82 | 58.05 | 49.85 | 44.74 | 56.05 | 67.08 | 82.63 | 67.33 | 78.1 | 75.05 |
150
+ | [`tner/twitter-roberta-base-2019-90m-tweetner7-continuous`](https://huggingface.co/tner/twitter-roberta-base-2019-90m-tweetner7-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-2019-90m`](https://huggingface.co/cardiffnlp/twitter-roberta-base-2019-90m) | 65.87 | 61.07 | 51.66 | 48.01 | 48.47 | 60.42 | 68.36 | 84.59 | 66.01 | 64.76 | 60.58 | 56.19 | 54.97 | 44.67 | 53.17 | 63.53 | 83.64 | 67.88 | 78.44 | 75.53 |
151
+ | [`tner/twitter-roberta-base-dec2020-tweetner7-continuous`](https://huggingface.co/tner/twitter-roberta-base-dec2020-tweetner7-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-dec2020`](https://huggingface.co/cardiffnlp/twitter-roberta-base-dec2020) | 65.51 | 60.57 | 53.56 | 45.3 | 46.92 | 61.07 | 66.28 | 84.33 | 66.49 | 65.29 | 61.28 | 59.26 | 55.59 | 43.84 | 54.38 | 64.14 | 84.08 | 67.68 | 78.03 | 75.88 |
152
+ | [`tner/bertweet-large-tweetner7-continuous`](https://huggingface.co/tner/bertweet-large-tweetner7-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-large`](https://huggingface.co/vinai/bertweet-large) | 66.41 | 61.66 | 55.07 | 46.85 | 48.16 | 61.44 | 68.87 | 84.04 | 67.18 | 65.88 | 61.82 | 58.38 | 54.65 | 46.12 | 56.39 | 66.67 | 83.89 | 66.67 | 78.97 | 76.42 |
153
+ | [`tner/bertweet-base-tweetner7-continuous`](https://huggingface.co/tner/bertweet-base-tweetner7-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-base`](https://huggingface.co/vinai/bertweet-base) | 65.84 | 61.02 | 51.85 | 46.83 | 49.66 | 61.17 | 66.58 | 84.47 | 66.59 | 65.16 | 61.35 | 55.76 | 56.83 | 46.22 | 56.32 | 66.27 | 82.94 | 65.13 | 79.1 | 76.8 |
154
+ | [`tner/bert-large-tweetner7-continuous`](https://huggingface.co/tner/bert-large-tweetner7-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-large`](https://huggingface.co/bert-large) | 63.2 | 57.67 | 51.4 | 39.74 | 42.55 | 58.6 | 63.36 | 81.27 | 66.78 | 62.48 | 57.87 | 56.56 | 43.65 | 45.51 | 50.38 | 60.26 | 80.62 | 68.12 | 76.04 | 72.46 |
155
+ | [`tner/bert-base-tweetner7-continuous`](https://huggingface.co/tner/bert-base-tweetner7-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-base`](https://huggingface.co/bert-base) | 61.8 | 56.84 | 47.4 | 38.22 | 44.05 | 57.73 | 64.42 | 80.72 | 65.31 | 61.41 | 57.11 | 54.41 | 42.41 | 41.46 | 51.25 | 63.49 | 79.9 | 66.84 | 76.53 | 72.5 |
156
+ | [`tner/roberta-large-tweetner7-2021`](https://huggingface.co/tner/roberta-large-tweetner7-2021) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 64.05 | 59.11 | 50.58 | 43.91 | 46.6 | 60.68 | 63.99 | 82.68 | 65.3 | 63.36 | 59.15 | 53.22 | 49.41 | 46.61 | 54.65 | 63.12 | 81.33 | 65.67 | 77.71 | 74.36 |
157
+ | [`tner/roberta-base-tweetner7-2021`](https://huggingface.co/tner/roberta-base-tweetner7-2021) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-base`](https://huggingface.co/roberta-base) | 61.76 | 57 | 48.9 | 38 | 45.51 | 57.02 | 65.06 | 81.34 | 63.17 | 60.5 | 56.12 | 49.86 | 45.33 | 39.83 | 52.81 | 60.95 | 79.93 | 64.15 | 76.92 | 73.75 |
158
+ | [`tner/twitter-roberta-base-2019-90m-tweetner7-2021`](https://huggingface.co/tner/twitter-roberta-base-2019-90m-tweetner7-2021) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-2019-90m`](https://huggingface.co/cardiffnlp/twitter-roberta-base-2019-90m) | 63.23 | 56.72 | 46.73 | 33.12 | 45.97 | 57.61 | 64.42 | 83.21 | 65.95 | 61.91 | 56.09 | 48.59 | 41.1 | 44.35 | 49.57 | 64.16 | 82.3 | 62.6 | 75.69 | 73.04 |
159
+ | [`tner/twitter-roberta-base-dec2020-tweetner7-2021`](https://huggingface.co/tner/twitter-roberta-base-dec2020-tweetner7-2021) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-dec2020`](https://huggingface.co/cardiffnlp/twitter-roberta-base-dec2020) | 63.98 | 58.91 | 51.04 | 40.86 | 46.2 | 60.22 | 65.55 | 82.64 | 65.88 | 63.07 | 58.51 | 53.26 | 47.09 | 40.92 | 56.46 | 64.86 | 82.1 | 64.89 | 77.87 | 75.35 |
160
+ | [`tner/bertweet-large-tweetner7-2021`](https://huggingface.co/tner/bertweet-large-tweetner7-2021) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-large`](https://huggingface.co/vinai/bertweet-large) | 62.9 | 58.13 | 48.87 | 42.33 | 44.87 | 56.4 | 66.21 | 81.05 | 67.16 | 61.61 | 56.84 | 54.24 | 40.83 | 43.34 | 50.3 | 64.56 | 81.57 | 63.05 | 76.5 | 74.46 |
161
+ | [`tner/bertweet-base-tweetner7-2021`](https://huggingface.co/tner/bertweet-base-tweetner7-2021) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-base`](https://huggingface.co/vinai/bertweet-base) | 63.09 | 57.35 | 45.66 | 40.99 | 46.28 | 59.32 | 63.34 | 82.79 | 63.1 | 62.06 | 57.23 | 49.87 | 45.83 | 43.89 | 52.65 | 63.58 | 81.79 | 63.01 | 77.88 | 75.95 |
162
+ | [`tner/bert-large-tweetner7-2021`](https://huggingface.co/tner/bert-large-tweetner7-2021) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-large`](https://huggingface.co/bert-large) | 59.75 | 53.93 | 44.87 | 34.17 | 40.24 | 55.68 | 63.95 | 79.4 | 59.19 | 56.63 | 50.97 | 49.32 | 31.58 | 30.39 | 50.27 | 59.76 | 76.07 | 59.41 | 74.98 | 70.66 |
163
+ | [`tner/bert-base-tweetner7-2021`](https://huggingface.co/tner/bert-base-tweetner7-2021) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-base`](https://huggingface.co/bert-base) | 60.67 | 55.5 | 46.8 | 35.35 | 41.28 | 56.23 | 64.78 | 79.89 | 64.17 | 58.45 | 54.22 | 48.84 | 43.05 | 32.27 | 50.65 | 61.54 | 76.68 | 66.5 | 75.72 | 70.86 |
164
+ | [`tner/roberta-large-tweetner7-2020`](https://huggingface.co/tner/roberta-large-tweetner7-2020) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 64.76 | 60 | 52.23 | 45.89 | 48.51 | 60.88 | 64.43 | 83.32 | 64.75 | 65.67 | 61.88 | 56.82 | 51.85 | 51.06 | 58.65 | 67.06 | 82.59 | 65.15 | 78.36 | 76.11 |
165
+ | [`tner/roberta-base-tweetner7-2020`](https://huggingface.co/tner/roberta-base-tweetner7-2020) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-base`](https://huggingface.co/roberta-base) | 64.21 | 59.11 | 50.75 | 44.44 | 43.9 | 59.15 | 65.84 | 83.92 | 65.73 | 64.25 | 60.23 | 58.59 | 48.94 | 43.84 | 55.31 | 65.63 | 82 | 67.32 | 77.89 | 74.8 |
166
+ | [`tner/twitter-roberta-base-2019-90m-tweetner7-2020`](https://huggingface.co/tner/twitter-roberta-base-2019-90m-tweetner7-2020) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-2019-90m`](https://huggingface.co/cardiffnlp/twitter-roberta-base-2019-90m) | 64.28 | 59.31 | 48.54 | 46.89 | 43.69 | 59.09 | 67.01 | 84 | 65.98 | 65.42 | 61.11 | 56.28 | 53.69 | 43.39 | 56.23 | 64.76 | 84.73 | 68.72 | 77.9 | 76.56 |
167
+ | [`tner/twitter-roberta-base-dec2020-tweetner7-2020`](https://huggingface.co/tner/twitter-roberta-base-dec2020-tweetner7-2020) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-dec2020`](https://huggingface.co/cardiffnlp/twitter-roberta-base-dec2020) | 62.87 | 58.26 | 49.9 | 44.9 | 43.68 | 57.62 | 64.38 | 82.29 | 65.07 | 64.39 | 60.31 | 55.19 | 51.72 | 42.91 | 55.95 | 65.47 | 83.98 | 66.98 | 76.49 | 75.65 |
168
+ | [`tner/bertweet-large-tweetner7-2020`](https://huggingface.co/tner/bertweet-large-tweetner7-2020) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-large`](https://huggingface.co/vinai/bertweet-large) | 64.01 | 59.47 | 52.29 | 46.3 | 45 | 59.27 | 65.53 | 82.73 | 65.19 | 65.93 | 62.61 | 59.67 | 58.92 | 45.01 | 54.55 | 68.09 | 83.59 | 68.47 | 78.26 | 77.38 |
169
+ | [`tner/bertweet-base-tweetner7-2020`](https://huggingface.co/tner/bertweet-base-tweetner7-2020) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-base`](https://huggingface.co/vinai/bertweet-base) | 64.06 | 59.44 | 51.62 | 45.72 | 45.87 | 59.74 | 64.7 | 82.71 | 65.74 | 66.38 | 62.41 | 58.05 | 54.95 | 49.9 | 56.18 | 67.45 | 84.75 | 65.57 | 77.91 | 77.73 |
170
+ | [`tner/bert-large-tweetner7-2020`](https://huggingface.co/tner/bert-large-tweetner7-2020) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-large`](https://huggingface.co/bert-large) | 61.43 | 56.14 | 50.11 | 39.03 | 41.8 | 57.31 | 61.13 | 80.6 | 63.02 | 62.19 | 58.15 | 56.68 | 43.75 | 47.24 | 49.72 | 62.62 | 80.03 | 66.97 | 75.86 | 73.79 |
171
+ | [`tner/bert-base-tweetner7-2020`](https://huggingface.co/tner/bert-base-tweetner7-2020) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-base`](https://huggingface.co/bert-base) | 60.09 | 54.67 | 44.11 | 37.52 | 40.28 | 55.77 | 61.8 | 80.52 | 62.73 | 60.87 | 56.49 | 50.77 | 44.07 | 38.35 | 53.18 | 63.29 | 81.06 | 64.71 | 75.61 | 72.42 |
172
+
173
+ Model description follows below.
174
+ * Model with suffix `-all`: Model fine-tuned on `train_all` and validated on `validation_2021`.
175
+ * Model with suffix `-continuous`: Model fine-tuned on `train_2021` continuously after fine-tuning on `train_2020` and validated on `validation_2021`.
176
+ * Model with suffix `-2021`: Model fine-tuned only on `train_2021` and validated on `validation_2021`.
177
+ * Model with suffix `-2020`: Model fine-tuned only on `train_2020` and validated on `validation_2020`.
178
+
179
+
180
+ ### Sub Models (used in ablation study)
181
+
182
+ - Model fine-tuned only on `train_random` and validated on `validation_2020`.
183
+
184
+ | Model (link) | Data | Language Model | Micro F1 (2021) | Macro F1 (2021) | F1 (2021)/corporation | F1 (2021)/creative_work | F1 (2021)/event | F1 (2021)/group | F1 (2021)/location | F1 (2021)/person | F1 (2021)/product | Micro F1 (2020) | Macro F1 (2020) | F1 (2020)/corporation | F1 (2020)/creative_work | F1 (2020)/event | F1 (2020)/group | F1 (2020)/location | F1 (2020)/person | F1 (2020)/product | Entity-Span F1 (2021) | Entity-Span F1 (2020) |
185
+ |:------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------|------------------:|------------------:|------------------------:|--------------------------:|------------------:|------------------:|---------------------:|-------------------:|--------------------:|------------------:|------------------:|------------------------:|--------------------------:|------------------:|------------------:|---------------------:|-------------------:|--------------------:|------------------------:|------------------------:|
186
+ | [`tner/roberta-large-tweetner7-random`](https://huggingface.co/tner/roberta-large-tweetner7-random) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 66.33 | 60.96 | 52.24 | 45.19 | 48.95 | 63.28 | 66.92 | 83.84 | 66.34 | 64.4 | 60.09 | 53.45 | 50.27 | 46.68 | 57.25 | 65.44 | 81.79 | 65.73 | 79 | 75.52 |
187
+ | [`tner/roberta-base-tweetner7-random`](https://huggingface.co/tner/roberta-base-tweetner7-random) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-base`](https://huggingface.co/roberta-base) | 64.04 | 59.23 | 50.73 | 42.35 | 45.98 | 59.73 | 67.95 | 82.32 | 65.58 | 64.14 | 59.78 | 57.58 | 47.62 | 42.19 | 56.48 | 67.07 | 82.71 | 64.84 | 78.04 | 74.26 |
188
+ | [`tner/twitter-roberta-base-2019-90m-tweetner7-random`](https://huggingface.co/tner/twitter-roberta-base-2019-90m-tweetner7-random) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-2019-90m`](https://huggingface.co/cardiffnlp/twitter-roberta-base-2019-90m) | 63.29 | 58.5 | 50.56 | 41.68 | 45.7 | 59.91 | 64.8 | 83.02 | 63.82 | 64.29 | 60.67 | 56.85 | 48.88 | 45.36 | 55.03 | 71.75 | 82.29 | 64.55 | 77.36 | 76.21 |
189
+ | [`tner/twitter-roberta-base-dec2020-tweetner7-random`](https://huggingface.co/tner/twitter-roberta-base-dec2020-tweetner7-random) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`cardiffnlp/twitter-roberta-base-dec2020`](https://huggingface.co/cardiffnlp/twitter-roberta-base-dec2020) | 64.72 | 59.97 | 49.08 | 46.42 | 45.65 | 61.68 | 67.5 | 83.31 | 66.15 | 64.69 | 60.53 | 55.56 | 53.85 | 44.27 | 56.57 | 65.05 | 84.03 | 64.41 | 78.29 | 75.94 |
190
+ | [`tner/bertweet-large-tweetner7-random`](https://huggingface.co/tner/bertweet-large-tweetner7-random) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-large`](https://huggingface.co/vinai/bertweet-large) | 64.86 | 60.49 | 53.59 | 45.47 | 46.19 | 61.64 | 66.16 | 82.79 | 67.58 | 66.02 | 62.72 | 57.81 | 58.19 | 47.64 | 58.78 | 68.25 | 83.36 | 64.97 | 78.43 | 77.2 |
191
+ | [`tner/bertweet-base-tweetner7-random`](https://huggingface.co/tner/bertweet-base-tweetner7-random) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`vinai/bertweet-base`](https://huggingface.co/vinai/bertweet-base) | 65.55 | 59.58 | 49.6 | 40.06 | 47.29 | 62.07 | 67.98 | 83.52 | 66.56 | 63.89 | 58.61 | 54.38 | 45.05 | 41.97 | 55.88 | 66.03 | 83.36 | 63.61 | 77.8 | 74.39 |
192
+ | [`tner/bert-large-tweetner7-random`](https://huggingface.co/tner/bert-large-tweetner7-random) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-large`](https://huggingface.co/bert-large) | 62.39 | 57.54 | 49.15 | 39.72 | 44.79 | 57.67 | 67.22 | 81.17 | 63.07 | 61.54 | 57.09 | 56.34 | 42.81 | 42.69 | 53.36 | 61.98 | 81.04 | 61.43 | 76.49 | 73.29 |
193
+ | [`tner/bert-base-tweetner7-random`](https://huggingface.co/tner/bert-base-tweetner7-random) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`bert-base`](https://huggingface.co/bert-base) | 60.91 | 55.92 | 46.51 | 39.05 | 41.83 | 56.14 | 63.9 | 80.45 | 63.54 | 61.04 | 56.75 | 53.94 | 42.77 | 39.15 | 53.07 | 62.67 | 80.59 | 65.08 | 75.72 | 72.73 |
194
+
195
+ - Model fine-tuned on the self-labeled dataset on `extra_{2020,2021}` and validated on `validation_2020`.
196
+
197
+ | Model (link) | Data | Language Model | Micro F1 (2021) | Macro F1 (2021) | F1 (2021)/corporation | F1 (2021)/creative_work | F1 (2021)/event | F1 (2021)/group | F1 (2021)/location | F1 (2021)/person | F1 (2021)/product | Micro F1 (2020) | Macro F1 (2020) | F1 (2020)/corporation | F1 (2020)/creative_work | F1 (2020)/event | F1 (2020)/group | F1 (2020)/location | F1 (2020)/person | F1 (2020)/product | Entity-Span F1 (2021) | Entity-Span F1 (2020) |
198
+ |:----------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------|:--------------------------------------------------------|------------------:|------------------:|------------------------:|--------------------------:|------------------:|------------------:|---------------------:|-------------------:|--------------------:|------------------:|------------------:|------------------------:|--------------------------:|------------------:|------------------:|---------------------:|-------------------:|--------------------:|------------------------:|------------------------:|
199
+ | [`tner/roberta-large-tweetner7-selflabel2020`](https://huggingface.co/tner/roberta-large-tweetner7-selflabel2020) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 64.56 | 59.63 | 52.28 | 46.82 | 44.47 | 61.55 | 64.24 | 84.02 | 64.02 | 65.9 | 61.85 | 58.15 | 51.99 | 48.05 | 57.25 | 66.86 | 84.16 | 66.51 | 78.46 | 76.71 |
200
+ | [`tner/roberta-large-tweetner7-selflabel2021`](https://huggingface.co/tner/roberta-large-tweetner7-selflabel2021) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 64.6 | 59.45 | 50.21 | 45.89 | 45.18 | 60.3 | 66.71 | 83.46 | 64.38 | 64.75 | 60.65 | 56.19 | 50.41 | 47.31 | 55.21 | 67.46 | 81.9 | 66.06 | 78.57 | 76.63 |
201
+ | [`tner/roberta-large-tweetner7-2020-selflabel2020-all`](https://huggingface.co/tner/roberta-large-tweetner7-2020-selflabel2020-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 65.46 | 60.39 | 52.56 | 46.12 | 45.83 | 61.7 | 67.17 | 84.39 | 64.95 | 66.23 | 62.26 | 57.5 | 54.2 | 46.75 | 58.32 | 67.86 | 83.56 | 67.61 | 79.17 | 77.17 |
202
+ | [`tner/roberta-large-tweetner7-2020-selflabel2021-all`](https://huggingface.co/tner/roberta-large-tweetner7-2020-selflabel2021-all) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 64.52 | 59.45 | 50.67 | 45.38 | 44.53 | 60.63 | 66.19 | 83.59 | 65.17 | 66.05 | 61.83 | 58.23 | 53.44 | 44.39 | 59.79 | 68.09 | 83.43 | 65.43 | 78.5 | 76.94 |
203
+ | [`tner/roberta-large-tweetner7-selflabel2020-continuous`](https://huggingface.co/tner/roberta-large-tweetner7-selflabel2020-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 65.15 | 60.23 | 52.53 | 46.5 | 46.18 | 60.87 | 66.67 | 83.83 | 65.03 | 66.7 | 62.86 | 59.35 | 54.44 | 48.28 | 59.44 | 67.66 | 83.36 | 67.45 | 78.73 | 77.12 |
204
+ | [`tner/roberta-large-tweetner7-selflabel2021-continuous`](https://huggingface.co/tner/roberta-large-tweetner7-selflabel2021-continuous) | [`tweetner7`](https://huggingface.co/datasets/tner/tweetner7) | [`roberta-large`](https://huggingface.co/roberta-large) | 64.48 | 59.41 | 50.58 | 45.67 | 44.4 | 61.09 | 66.36 | 83.63 | 64.14 | 65.48 | 61.42 | 56.93 | 51.75 | 48.72 | 57.61 | 67.27 | 83.29 | 64.37 | 78.36 | 76.5 |
205
+
206
+
207
+ Model description follows below.
208
+ * Model with suffix `-selflabel2020`: Fine-tuning on the self-annotated data of `extra_2020` split of [tweetner7](https://huggingface.co/datasets/tner/tweetner7).
209
+ * Model with suffix `-selflabel2021`: Fine-tuning on the self-annotated data of `extra_2021` split of [tweetner7](https://huggingface.co/datasets/tner/tweetner7).
210
+ * Model with suffix `-2020-selflabel2020-all`: Fine-tuning on the self-annotated data of `extra_2020` split of [tweetner7](https://huggingface.co/datasets/tner/tweetner7). Combined training dataset of `extra_2020` and `train_2020`.
211
+ * Model with suffix `-2020-selflabel2021-all`: Fine-tuning on the self-annotated data of `extra_2021` split of [tweetner7](https://huggingface.co/datasets/tner/tweetner7). Combined training dataset of `extra_2021` and `train_2020`.
212
+ * Model with suffix `-selflabel2020-continuous`: Fine-tuning on the self-annotated data of `extra_2020` split of [tweetner7](https://huggingface.co/datasets/tner/tweetner7). Fine-tuning on `train_2020` and continuing fine-tuning on `extra_2020`.
213
+ * Model with suffix `-selflabel2021-continuous`: Fine-tuning on the self-annotated data of `extra_2021` split of [tweetner7](https://huggingface.co/datasets/tner/tweetner7). Fine-tuning on `train_2020` and continuing fine-tuning on `extra_2021`.
214
+
215
+ ### Reproduce Experimental Result
216
+
217
+ To reproduce the experimental result on our AACL paper, please see the repository
218
+ [https://github.com/asahi417/tner/tree/master/examples/tweetner7_paper](https://github.com/asahi417/tner/tree/master/examples/tweetner7_paper).
219
+
220
+
221
+ ## Citation Information
222
 
223
  ```
224
  @inproceedings{ushio-etal-2022-tweet,
get_model_list.py CHANGED
@@ -4,45 +4,86 @@ import requests
4
 
5
  import pandas as pd
6
 
 
 
 
7
 
8
- def download(filename, url):
 
 
 
 
 
 
 
 
 
 
9
  try:
10
  with open(filename) as f:
11
- json.load(f)
12
  except Exception:
13
- os.makedirs(os.path.dirname(filename), exist_ok=True)
14
- with open(filename, "wb") as f:
15
- r = requests.get(url)
16
- f.write(r.content)
17
- with open(filename) as f:
18
- tmp = json.load(f)
 
 
 
 
 
 
 
19
  return tmp
20
 
21
 
22
- "https://huggingface.co/tner/roberta-large-tweetner7-all/raw/main/eval/metric.test_2021.json"
 
 
 
 
 
 
 
 
 
 
23
 
24
- models = [
25
- "tner/roberta-large-tweetner7-all",
26
- "tner/roberta-base-tweetner7-all",
27
- "tner/twitter-roberta-base-2019-90m-tweetner7-all",
28
- "tner/twitter-roberta-base-dec2020-tweetner7-all",
29
- "tner/twitter-roberta-base-dec2021-tweetner7-all"
30
- "tner/roberta-large-tweetner7-2020",
31
- "tner/roberta-base-tweetner7-2020",
32
- "tner/twitter-roberta-base-2019-90m-tweetner7-2020",
33
- "tner/twitter-roberta-base-dec2020-tweetner7-2020",
34
- "tner/twitter-roberta-base-dec2021-tweetner7-2020"
35
  ]
36
 
37
- os.makedirs("metric_files", exist_ok=True)
38
 
39
- metrics = []
40
- for i in models:
41
- model_type = "all (2020 + 2021)" if i.endswith("all") else "2020 only"
42
- url = f"https://huggingface.co/{i}/raw/main/metric_summary.json"
43
- model_url = f"https://huggingface.co/{i}"
44
- metric = download(f"metric_files/{os.path.basename(i)}.json", url)
45
- metrics.append({"model": f"[{i}]({model_url})", "training data": model_type, "F1": metric["test/eval_f1"], "F1 (macro)": metric["test/eval_f1_macro"], "Accuracy": metric["test/eval_accuracy"]})
 
 
 
 
 
 
 
 
 
 
46
 
47
- df = pd.DataFrame(metrics)
48
- print(df.to_markdown(index=False))
 
 
4
 
5
  import pandas as pd
6
 
7
+ dataset_link = "[`tweetner7`](https://huggingface.co/datasets/tner/tweetner7)"
8
+ metric_dir = 'metric_files'
9
+ os.makedirs(metric_dir, exist_ok=True)
10
 
11
+
12
+ def lm_link(_model): return f"[`{_model}`](https://huggingface.co/{_model})"
13
+
14
+
15
+ def model_link(_model, _type): return f"[`tner/{_model}-tweetner7-{_type}`](https://huggingface.co/tner/{_model}-tweetner7-{_type})"
16
+
17
+
18
+ def download(_model, _type):
19
+ url = f"https://huggingface.co/tner/{_model}-tweetner7-{_type}/raw/main/eval"
20
+ filename = f"{metric_dir}/{_model}-{_type}.json"
21
+ print(url, filename)
22
  try:
23
  with open(filename) as f:
24
+ return json.load(f)
25
  except Exception:
26
+ tmp = {}
27
+ for metric in ["metric.test_2021", "metric.test_2020", "metric_span.test_2021", "metric_span.test_2020"]:
28
+ year = metric[-4:]
29
+ if metric not in tmp:
30
+ _metric = json.loads(requests.get(f"{url}/{metric}.json").content)
31
+ if '_span' in metric:
32
+ tmp[f"Entity-Span F1 ({year})"] = round(100 * _metric["micro/f1"], 2)
33
+ else:
34
+ tmp[f"Micro F1 ({year})"] = round(100 * _metric["micro/f1"], 2)
35
+ tmp[f"Macro F1 ({year})"] = round(100 * _metric["macro/f1"], 2)
36
+ tmp.update({f"F1 ({year})/{k}": round(100 * v['f1'], 2) for k, v in _metric["per_entity_metric"].items()})
37
+ with open(filename, "w") as f:
38
+ json.dump(tmp, f)
39
  return tmp
40
 
41
 
42
+ lms = [
43
+ "roberta-large",
44
+ "roberta-base",
45
+ "cardiffnlp/twitter-roberta-base-2019-90m",
46
+ "cardiffnlp/twitter-roberta-base-dec2020",
47
+ "cardiffnlp/twitter-roberta-base-dec2021",
48
+ "vinai/bertweet-large",
49
+ "vinai/bertweet-base",
50
+ "bert-large",
51
+ "bert-base"
52
+ ]
53
 
54
+ types = [
55
+ ["all", "continuous", "2021", "2020"],
56
+ ["random"],
57
+ [
58
+ "selflabel2020",
59
+ "selflabel2021",
60
+ "2020-selflabel2020-all",
61
+ "2020-selflabel2021-all",
62
+ "selflabel2020-continuous",
63
+ "selflabel2021-continuous"
64
+ ]
65
  ]
66
 
 
67
 
68
+ for tt in types:
69
+ metrics = []
70
+ for t in tt:
71
+ for lm in lms:
72
+
73
+ if 'selflabel' in t and lm != "roberta-large":
74
+ continue
75
+ _lm_link = lm_link(lm)
76
+ lm = os.path.basename(lm)
77
+ _model_link = model_link(lm, t)
78
+ __metric = {
79
+ "Model (link)": model_link(lm, t),
80
+ "Data": dataset_link,
81
+ "Language Model": _lm_link
82
+ }
83
+ __metric.update(download(lm, t))
84
+ metrics.append(__metric)
85
 
86
+ df = pd.DataFrame(metrics)
87
+ print(tt)
88
+ print(df.to_markdown(index=False))
89
+ print()