simcse-fakenews-unsup-title-v1 / trainer_state.json
{
"best_metric": 0.5580736543909348,
"best_model_checkpoint": "result/unsup/simcse-indobert-title-FakeCLSTrain-precise",
"epoch": 0.6617647058823529,
"global_step": 180,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.07,
"eval_CR": 73.16,
"eval_FakeCLSDev": 0.9839605181986428,
"eval_FakeCLSTest": 0.46304347826086956,
"eval_FakeCLSTrain": 0.5288220551378446,
"eval_FakePairs": 0.005351908362129137,
"eval_FakePairsNLI": 65.37,
"eval_MPQA": 77.6,
"eval_MR": 62.74,
"eval_MRPC": 69.87,
"eval_SST2": 69.5,
"eval_SUBJ": 83.35,
"eval_TREC": 61.56,
"eval_avg_sts": 0.608427864595204,
"eval_avg_transfer": 71.11142857142858,
"eval_sickr_spearman": 0.5697081334351907,
"eval_stsb_indo": 0.35816575526410505,
"eval_stsb_spearman": 0.6471475957552174,
"step": 20
},
{
"epoch": 0.15,
"eval_CR": 73.5,
"eval_FakeCLSDev": 0.9844054580896686,
"eval_FakeCLSTest": 0.4142857142857143,
"eval_FakeCLSTrain": 0.5319693094629157,
"eval_FakePairs": 0.0055116668207001555,
"eval_FakePairsNLI": 61.31,
"eval_MPQA": 77.5,
"eval_MR": 63.16,
"eval_MRPC": 70.39,
"eval_SST2": 70.53,
"eval_SUBJ": 83.61,
"eval_TREC": 63.37,
"eval_avg_sts": 0.6099489196482747,
"eval_avg_transfer": 71.72285714285714,
"eval_sickr_spearman": 0.5782534427542483,
"eval_stsb_indo": 0.3617575143911094,
"eval_stsb_spearman": 0.6416443965423011,
"step": 40
},
{
"epoch": 0.22,
"eval_CR": 72.97,
"eval_FakeCLSDev": 0.9841149773071104,
"eval_FakeCLSTest": 0.4196642685851319,
"eval_FakeCLSTrain": 0.5306122448979592,
"eval_FakePairs": -0.002236618419994266,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.47,
"eval_MR": 62.79,
"eval_MRPC": 70.71,
"eval_SST2": 69.72,
"eval_SUBJ": 84.08,
"eval_TREC": 62.77,
"eval_avg_sts": 0.6049352302914398,
"eval_avg_transfer": 71.50142857142856,
"eval_sickr_spearman": 0.5781722701928178,
"eval_stsb_indo": 0.37879410677523756,
"eval_stsb_spearman": 0.6316981903900618,
"step": 60
},
{
"epoch": 0.29,
"eval_CR": 72.45,
"eval_FakeCLSDev": 0.9840783833435395,
"eval_FakeCLSTest": 0.4339250493096647,
"eval_FakeCLSTrain": 0.5267175572519084,
"eval_FakePairs": 0.026200390087916275,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.75,
"eval_MR": 62.75,
"eval_MRPC": 70.46,
"eval_SST2": 70.99,
"eval_SUBJ": 84.77,
"eval_TREC": 64.05,
"eval_avg_sts": 0.5918733193015971,
"eval_avg_transfer": 71.88857142857142,
"eval_sickr_spearman": 0.5672788644177553,
"eval_stsb_indo": 0.394149879866618,
"eval_stsb_spearman": 0.616467774185439,
"step": 80
},
{
"epoch": 0.37,
"eval_CR": 73.41,
"eval_FakeCLSDev": 0.9850316637881404,
"eval_FakeCLSTest": 0.42805755395683454,
"eval_FakeCLSTrain": 0.5404699738903395,
"eval_FakePairs": 0.0520812574941522,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.23,
"eval_MR": 62.73,
"eval_MRPC": 71.3,
"eval_SST2": 70.76,
"eval_SUBJ": 85.06,
"eval_TREC": 63.7,
"eval_avg_sts": 0.6083871006060083,
"eval_avg_transfer": 72.02714285714286,
"eval_sickr_spearman": 0.5814145136331947,
"eval_stsb_indo": 0.41193116007398217,
"eval_stsb_spearman": 0.6353596875788219,
"step": 100
},
{
"epoch": 0.44,
"eval_CR": 73.37,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.4190800681431005,
"eval_FakeCLSTrain": 0.5223880597014925,
"eval_FakePairs": 0.03538649857348071,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.45,
"eval_MR": 62.65,
"eval_MRPC": 70.61,
"eval_SST2": 71.67,
"eval_SUBJ": 84.81,
"eval_TREC": 64.86,
"eval_avg_sts": 0.6016284111780268,
"eval_avg_transfer": 72.20285714285716,
"eval_sickr_spearman": 0.5820290715760954,
"eval_stsb_indo": 0.41269918214526696,
"eval_stsb_spearman": 0.6212277507799581,
"step": 120
},
{
"epoch": 0.51,
"eval_CR": 73.31,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.41989881956155145,
"eval_FakeCLSTrain": 0.5362694300518135,
"eval_FakePairs": 0.03498710242705316,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.25,
"eval_MR": 62.17,
"eval_MRPC": 70.9,
"eval_SST2": 68.81,
"eval_SUBJ": 84.36,
"eval_TREC": 64.2,
"eval_avg_sts": 0.6051854166798671,
"eval_avg_transfer": 71.57142857142857,
"eval_sickr_spearman": 0.5806598489673991,
"eval_stsb_indo": 0.40615316280106933,
"eval_stsb_spearman": 0.6297109843923352,
"step": 140
},
{
"epoch": 0.59,
"eval_CR": 72.74,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.4189189189189189,
"eval_FakeCLSTrain": 0.5157384987893463,
"eval_FakePairs": 0.05176174057701016,
"eval_FakePairsNLI": 62.56,
"eval_MPQA": 77.04,
"eval_MR": 62.38,
"eval_MRPC": 71.44,
"eval_SST2": 69.61,
"eval_SUBJ": 84.4,
"eval_TREC": 62.58,
"eval_avg_sts": 0.6072393638619159,
"eval_avg_transfer": 71.45571428571428,
"eval_sickr_spearman": 0.585786112361686,
"eval_stsb_indo": 0.3949280161232874,
"eval_stsb_spearman": 0.6286926153621458,
"step": 160
},
{
"epoch": 0.66,
"eval_CR": 73.03,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.41765704584040747,
"eval_FakeCLSTrain": 0.5580736543909348,
"eval_FakePairs": 0.049445242927730386,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.37,
"eval_MR": 62.6,
"eval_MRPC": 70.44,
"eval_SST2": 70.18,
"eval_SUBJ": 84.29,
"eval_TREC": 62.6,
"eval_avg_sts": 0.5972569987657406,
"eval_avg_transfer": 71.50142857142858,
"eval_sickr_spearman": 0.5824195163996858,
"eval_stsb_indo": 0.3994525439625476,
"eval_stsb_spearman": 0.6120944811317954,
"step": 180
}
],
"max_steps": 816,
"num_train_epochs": 3,
"total_flos": 0,
"trial_name": null,
"trial_params": null
}
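
The JSON above is a standard Hugging Face Transformers trainer_state.json. A minimal sketch of how one might inspect it, assuming the file is saved locally as trainer_state.json (the path is an assumption); the keys used below (best_metric, best_model_checkpoint, log_history, eval_FakeCLSTrain, eval_stsb_spearman) all appear in the state above.

import json

# Load the trainer state written by the Hugging Face Trainer.
# The file name/path is an assumption; point it at the downloaded file.
with open("trainer_state.json") as f:
    state = json.load(f)

print("best metric:", state["best_metric"])
print("best checkpoint:", state["best_model_checkpoint"])

# Walk the evaluation log and print the metric used for model selection
# (eval_FakeCLSTrain) alongside the STS-B Spearman score at each eval step.
for entry in state["log_history"]:
    step = entry["step"]
    fake_cls_train = entry.get("eval_FakeCLSTrain")
    stsb_spearman = entry.get("eval_stsb_spearman")
    print(f"step {step:4d}  eval_FakeCLSTrain={fake_cls_train:.4f}  "
          f"eval_stsb_spearman={stsb_spearman:.4f}")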