asahi417 committed
Commit 3f3f6b1
1 Parent(s): a224b57

model update

Files changed (3):
  1. README.md +6 -6
  2. best_run_hyperparameters.json +1 -1
  3. metric.json +1 -1
README.md CHANGED
@@ -17,13 +17,13 @@ model-index:
     metrics:
     - name: Micro F1 (tweet_eval/irony)
       type: micro_f1_tweet_eval/irony
-      value: 0.7959183673469388
+      value: 0.7831632653061223
     - name: Macro F1 (tweet_eval/irony)
       type: micro_f1_tweet_eval/irony
-      value: 0.791350632069195
+      value: 0.7772250748823277
     - name: Accuracy (tweet_eval/irony)
       type: accuracy_tweet_eval/irony
-      value: 0.7959183673469388
+      value: 0.7831632653061225
 pipeline_tag: text-classification
 widget:
 - text: Get the all-analog Classic Vinyl Edition of "Takin Off" Album from {@herbiehancock@} via {@bluenoterecords@} link below {{URL}}
@@ -50,9 +50,9 @@ Training split is `train` and parameters have been tuned on the validation split
 
 Following metrics are achieved on the test split `test` ([link](https://huggingface.co/cardiffnlp/twitter-roberta-base-dec2021-irony/raw/main/metric.json)).
 
-- F1 (micro): 0.7959183673469388
-- F1 (macro): 0.791350632069195
-- Accuracy: 0.7959183673469388
+- F1 (micro): 0.7831632653061223
+- F1 (macro): 0.7772250748823277
+- Accuracy: 0.7831632653061225
 
 ### Usage
 Install tweetnlp via pip.
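The `### Usage` section that closes this hunk points to the tweetnlp package. A minimal sketch of what that usage typically looks like for this checkpoint follows; the `Classifier` constructor and its `max_length` argument reflect the usual tweetnlp interface and are an assumption here, not part of this commit.

```python
# Assumed usage sketch, not part of this commit. Install first with:
#   pip install tweetnlp
import tweetnlp

# Load this repository's irony classifier (max_length is an illustrative setting).
model = tweetnlp.Classifier("cardiffnlp/twitter-roberta-base-dec2021-irony", max_length=128)

# Classify the widget example from the model card.
print(model.predict(
    'Get the all-analog Classic Vinyl Edition of "Takin Off" Album from '
    "{@herbiehancock@} via {@bluenoterecords@} link below {{URL}}"
))
```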
best_run_hyperparameters.json CHANGED
@@ -1 +1 @@
- {"learning_rate": 1.5930522616241033e-05, "num_train_epochs": 4, "per_device_train_batch_size": 16}
+ {"learning_rate": 2.910635913133073e-05, "num_train_epochs": 5, "per_device_train_batch_size": 4}
metric.json CHANGED
@@ -1 +1 @@
- {"eval_loss": 1.3228046894073486, "eval_f1": 0.7959183673469388, "eval_f1_macro": 0.791350632069195, "eval_accuracy": 0.7959183673469388, "eval_runtime": 2.2267, "eval_samples_per_second": 352.084, "eval_steps_per_second": 44.01}
+ {"eval_loss": 1.3212143182754517, "eval_f1": 0.7831632653061223, "eval_f1_macro": 0.7772250748823277, "eval_accuracy": 0.7831632653061225, "eval_runtime": 2.0561, "eval_samples_per_second": 381.312, "eval_steps_per_second": 47.664}