asahi417 committed on
Commit
724a86e
1 Parent(s): 9174f9c
Files changed (3)
  1. README.md +76 -0
  2. metric.json +1 -0
  3. pytorch_model.bin +1 -1
README.md ADDED
@@ -0,0 +1,76 @@
+ ---
+ datasets:
+ - irony
+ metrics:
+ - f1
+ - accuracy
+ model-index:
+ - name: cardiffnlp/twitter-roberta-base-dec2021-irony
+   results:
+   - task:
+       type: text-classification
+       name: Text Classification
+     dataset:
+       name: irony
+       type: tweet_eval
+       split: test
+     metrics:
+     - name: F1
+       type: f1
+       value: 0.7959183673469388
+     - name: F1 (macro)
+       type: f1_macro
+       value: 0.791350632069195
+     - name: Accuracy
+       type: accuracy
+       value: 0.7959183673469388
+ pipeline_tag: text-classification
+ widget:
+ - text: Get the all-analog Classic Vinyl Edition of "Takin' Off" Album from {@herbiehancock@} via {@bluenoterecords@} link below: {{URL}}
+   example_title: "Example"
+ ---
+ # cardiffnlp/twitter-roberta-base-dec2021-irony
+
+ This model is a fine-tuned version of [cardiffnlp/twitter-roberta-base-dec2021](https://huggingface.co/cardiffnlp/twitter-roberta-base-dec2021) on the
+ [`irony (tweet_eval)`](https://huggingface.co/datasets/irony) dataset
+ via [`tweetnlp`](https://github.com/cardiffnlp/tweetnlp).
+ The model was trained on the `train` split, and hyperparameters were tuned on the `validation` split.
+
+ The following metrics are achieved on the `test` split ([link](https://huggingface.co/cardiffnlp/twitter-roberta-base-dec2021-irony/raw/main/metric.json)):
+
+ - F1 (micro): 0.7959183673469388
+ - F1 (macro): 0.791350632069195
+ - Accuracy: 0.7959183673469388
+
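+ These numbers can also be read programmatically from `metric.json`; a minimal sketch, assuming the file stays at the raw URL linked above:
+ ```python
+ # Minimal sketch: fetch and parse the evaluation metrics published with this model.
+ # Assumption: metric.json remains available at this raw URL (same link as above).
+ import json
+ import requests
+
+ url = "https://huggingface.co/cardiffnlp/twitter-roberta-base-dec2021-irony/raw/main/metric.json"
+ metrics = json.loads(requests.get(url).text)
+ print(metrics["eval_f1"], metrics["eval_f1_macro"], metrics["eval_accuracy"])
+ ```
+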
+ ### Usage
+ Install tweetnlp via pip.
+ ```shell
+ pip install tweetnlp
+ ```
+ Load the model in Python and run a prediction.
+ ```python
+ import tweetnlp
+
+ model = tweetnlp.Classifier("cardiffnlp/twitter-roberta-base-dec2021-irony", max_length=128)
+ model.predict('Get the all-analog Classic Vinyl Edition of "Takin\' Off" Album from {@herbiehancock@} via {@bluenoterecords@} link below: {{URL}}')
+ ```
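+
+ The checkpoint can also be loaded without tweetnlp; a minimal sketch, assuming the generic `transformers` text-classification pipeline handles this checkpoint:
+ ```python
+ # Sketch: load the checkpoint with the generic transformers pipeline instead of tweetnlp.
+ # Assumption: the checkpoint ships a standard sequence-classification head and tokenizer.
+ from transformers import pipeline
+
+ classifier = pipeline(
+     "text-classification",
+     model="cardiffnlp/twitter-roberta-base-dec2021-irony",
+ )
+ text = 'Get the all-analog Classic Vinyl Edition of "Takin\' Off" Album from {@herbiehancock@} via {@bluenoterecords@} link below: {{URL}}'
+ print(classifier(text))
+ ```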
+
+ ### Reference
+
+ ```
+ @inproceedings{dimosthenis-etal-2022-twitter,
+     title = "{T}witter {T}opic {C}lassification",
+     author = "Antypas, Dimosthenis and
+       Ushio, Asahi and
+       Camacho-Collados, Jose and
+       Neves, Leonardo and
+       Silva, Vitor and
+       Barbieri, Francesco",
+     booktitle = "Proceedings of the 29th International Conference on Computational Linguistics",
+     month = oct,
+     year = "2022",
+     address = "Gyeongju, Republic of Korea",
+     publisher = "International Committee on Computational Linguistics"
+ }
+ ```
metric.json ADDED
@@ -0,0 +1 @@
+ {"eval_loss": 1.3228046894073486, "eval_f1": 0.7959183673469388, "eval_f1_macro": 0.791350632069195, "eval_accuracy": 0.7959183673469388, "eval_runtime": 2.2267, "eval_samples_per_second": 352.084, "eval_steps_per_second": 44.01}
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:78fffffdf202bbae5a084edbafa58e2852732015072d053dd3f4abd1048c994f
+ oid sha256:28efa8055082cc6d82d1fa43a444f8eaa41112f443d92bf5ea27122195221d37
  size 498659253