{
"best_metric": 0.5689655172413793,
"best_model_checkpoint": "result/unsup/simcse-indobert-fact-FakeCLSTrain-precise",
"epoch": 1.4229249011857708,
"global_step": 360,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.08,
"eval_CR": 73.51,
"eval_FakeCLSDev": 0.9859484777517564,
"eval_FakeCLSTest": 0.5161290322580645,
"eval_FakeCLSTrain": 0.47105788423153694,
"eval_FakePairs": -0.0655808472434033,
"eval_FakePairsNLI": 61.47,
"eval_MPQA": 77.42,
"eval_MR": 62.27,
"eval_MRPC": 69.77,
"eval_SST2": 69.5,
"eval_SUBJ": 83.28,
"eval_TREC": 63.68,
"eval_avg_sts": 0.615711473961408,
"eval_avg_transfer": 71.34714285714286,
"eval_sickr_spearman": 0.5665051314046887,
"eval_stsb_indo": 0.33437727467476286,
"eval_stsb_spearman": 0.6649178165181273,
"step": 20
},
{
"epoch": 0.16,
"eval_CR": 72.68,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.4231433506044905,
"eval_FakeCLSTrain": 0.5489130434782609,
"eval_FakePairs": -0.04097804462346638,
"eval_FakePairsNLI": 61.0,
"eval_MPQA": 77.55,
"eval_MR": 61.07,
"eval_MRPC": 69.36,
"eval_SST2": 69.61,
"eval_SUBJ": 82.67,
"eval_TREC": 64.01,
"eval_avg_sts": 0.6012550116719224,
"eval_avg_transfer": 70.99285714285715,
"eval_sickr_spearman": 0.5668606095864323,
"eval_stsb_indo": 0.3166064743127013,
"eval_stsb_spearman": 0.6356494137574127,
"step": 40
},
{
"epoch": 0.24,
"eval_CR": 73.47,
"eval_FakeCLSDev": 0.9851598173515982,
"eval_FakeCLSTest": 0.49099099099099097,
"eval_FakeCLSTrain": 0.5536723163841808,
"eval_FakePairs": -0.03922070157918517,
"eval_FakePairsNLI": 61.62,
"eval_MPQA": 77.38,
"eval_MR": 62.28,
"eval_MRPC": 69.77,
"eval_SST2": 68.69,
"eval_SUBJ": 83.33,
"eval_TREC": 64.75,
"eval_avg_sts": 0.608615640622381,
"eval_avg_transfer": 71.38142857142857,
"eval_sickr_spearman": 0.5717063713483206,
"eval_stsb_indo": 0.3395385317873131,
"eval_stsb_spearman": 0.6455249098964414,
"step": 60
},
{
"epoch": 0.32,
"eval_CR": 73.54,
"eval_FakeCLSDev": 0.9851598173515982,
"eval_FakeCLSTest": 0.4665271966527197,
"eval_FakeCLSTrain": 0.56,
"eval_FakePairs": -0.058711233524849485,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.36,
"eval_MR": 62.75,
"eval_MRPC": 69.97,
"eval_SST2": 69.95,
"eval_SUBJ": 83.69,
"eval_TREC": 64.69,
"eval_avg_sts": 0.6091550301498823,
"eval_avg_transfer": 71.70714285714287,
"eval_sickr_spearman": 0.5691681677928235,
"eval_stsb_indo": 0.35668523348337833,
"eval_stsb_spearman": 0.6491418925069411,
"step": 80
},
{
"epoch": 0.4,
"eval_CR": 72.86,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.43423423423423424,
"eval_FakeCLSTrain": 0.5497237569060773,
"eval_FakePairs": -0.06198628192555537,
"eval_FakePairsNLI": 63.34,
"eval_MPQA": 77.43,
"eval_MR": 62.37,
"eval_MRPC": 70.02,
"eval_SST2": 69.72,
"eval_SUBJ": 83.97,
"eval_TREC": 62.91,
"eval_avg_sts": 0.6086202878060443,
"eval_avg_transfer": 71.32571428571428,
"eval_sickr_spearman": 0.5628235474794574,
"eval_stsb_indo": 0.3583996091555714,
"eval_stsb_spearman": 0.6544170281326311,
"step": 100
},
{
"epoch": 0.47,
"eval_CR": 73.16,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.43783783783783786,
"eval_FakeCLSTrain": 0.5668604651162791,
"eval_FakePairs": -0.061826523466984355,
"eval_FakePairsNLI": 63.03,
"eval_MPQA": 77.29,
"eval_MR": 62.36,
"eval_MRPC": 70.27,
"eval_SST2": 67.89,
"eval_SUBJ": 83.77,
"eval_TREC": 64.22,
"eval_avg_sts": 0.6163408060409252,
"eval_avg_transfer": 71.27999999999999,
"eval_sickr_spearman": 0.5696943965401795,
"eval_stsb_indo": 0.3670526673661317,
"eval_stsb_spearman": 0.6629872155416708,
"step": 120
},
{
"epoch": 0.55,
"eval_CR": 73.07,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.41919191919191917,
"eval_FakeCLSTrain": 0.556497175141243,
"eval_FakePairs": -0.06653939799482943,
"eval_FakePairsNLI": 70.2,
"eval_MPQA": 77.45,
"eval_MR": 62.45,
"eval_MRPC": 69.9,
"eval_SST2": 68.46,
"eval_SUBJ": 83.58,
"eval_TREC": 63.08,
"eval_avg_sts": 0.6153345841426848,
"eval_avg_transfer": 71.14142857142856,
"eval_sickr_spearman": 0.5701103458786335,
"eval_stsb_indo": 0.3509797527191032,
"eval_stsb_spearman": 0.6605588224067361,
"step": 140
},
{
"epoch": 0.63,
"eval_CR": 73.5,
"eval_FakeCLSDev": 0.9851598173515982,
"eval_FakeCLSTest": 0.4753747323340471,
"eval_FakeCLSTrain": 0.5612535612535613,
"eval_FakePairs": -0.0680571033512541,
"eval_FakePairsNLI": 65.83,
"eval_MPQA": 77.08,
"eval_MR": 62.5,
"eval_MRPC": 69.8,
"eval_SST2": 69.5,
"eval_SUBJ": 83.92,
"eval_TREC": 63.7,
"eval_avg_sts": 0.6138085030907702,
"eval_avg_transfer": 71.42857142857143,
"eval_sickr_spearman": 0.5714391262999188,
"eval_stsb_indo": 0.35272703771997466,
"eval_stsb_spearman": 0.6561778798816216,
"step": 160
},
{
"epoch": 0.71,
"eval_CR": 72.79,
"eval_FakeCLSDev": 0.9851513420902341,
"eval_FakeCLSTest": 0.49417249417249415,
"eval_FakeCLSTrain": 0.5285359801488834,
"eval_FakePairs": -0.06949492947839328,
"eval_FakePairsNLI": 62.71,
"eval_MPQA": 76.76,
"eval_MR": 62.73,
"eval_MRPC": 70.02,
"eval_SST2": 68.58,
"eval_SUBJ": 84.43,
"eval_TREC": 62.42,
"eval_avg_sts": 0.6196401183056581,
"eval_avg_transfer": 71.10428571428572,
"eval_sickr_spearman": 0.5703694256397907,
"eval_stsb_indo": 0.36643910488045184,
"eval_stsb_spearman": 0.6689108109715255,
"step": 180
},
{
"epoch": 0.79,
"eval_CR": 72.87,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.44014732965009207,
"eval_FakeCLSTrain": 0.5657142857142857,
"eval_FakePairs": -0.07093275560553244,
"eval_FakePairsNLI": 62.56,
"eval_MPQA": 76.85,
"eval_MR": 62.55,
"eval_MRPC": 70.34,
"eval_SST2": 66.28,
"eval_SUBJ": 83.69,
"eval_TREC": 62.9,
"eval_avg_sts": 0.6117092253437233,
"eval_avg_transfer": 70.78285714285714,
"eval_sickr_spearman": 0.562769224303731,
"eval_stsb_indo": 0.37225410713674706,
"eval_stsb_spearman": 0.6606492263837157,
"step": 200
},
{
"epoch": 0.87,
"eval_CR": 72.81,
"eval_FakeCLSDev": 0.9851598173515982,
"eval_FakeCLSTest": 0.4673684210526316,
"eval_FakeCLSTrain": 0.547945205479452,
"eval_FakePairs": -0.054158117455575444,
"eval_FakePairsNLI": 63.65,
"eval_MPQA": 77.75,
"eval_MR": 62.64,
"eval_MRPC": 70.29,
"eval_SST2": 69.38,
"eval_SUBJ": 83.87,
"eval_TREC": 63.48,
"eval_avg_sts": 0.6109666311280921,
"eval_avg_transfer": 71.46000000000001,
"eval_sickr_spearman": 0.5690365625748831,
"eval_stsb_indo": 0.359153644946237,
"eval_stsb_spearman": 0.652896699681301,
"step": 220
},
{
"epoch": 0.95,
"eval_CR": 73.15,
"eval_FakeCLSDev": 0.9851767388825542,
"eval_FakeCLSTest": 0.42757417102966844,
"eval_FakeCLSTrain": 0.5528455284552846,
"eval_FakePairs": -0.044652489170599816,
"eval_FakePairsNLI": 60.69,
"eval_MPQA": 77.47,
"eval_MR": 62.17,
"eval_MRPC": 70.34,
"eval_SST2": 67.09,
"eval_SUBJ": 83.56,
"eval_TREC": 62.33,
"eval_avg_sts": 0.6128236398775047,
"eval_avg_transfer": 70.87285714285714,
"eval_sickr_spearman": 0.5620660489786768,
"eval_stsb_indo": 0.3265208518275815,
"eval_stsb_spearman": 0.6635812307763325,
"step": 240
},
{
"epoch": 1.03,
"eval_CR": 73.03,
"eval_FakeCLSDev": 0.9851513420902341,
"eval_FakeCLSTest": 0.4306306306306306,
"eval_FakeCLSTrain": 0.5597667638483965,
"eval_FakePairs": -0.05024404074786382,
"eval_FakePairsNLI": 61.62,
"eval_MPQA": 77.18,
"eval_MR": 61.87,
"eval_MRPC": 70.0,
"eval_SST2": 67.43,
"eval_SUBJ": 83.27,
"eval_TREC": 62.67,
"eval_avg_sts": 0.6131921270133698,
"eval_avg_transfer": 70.77857142857144,
"eval_sickr_spearman": 0.5614766113018284,
"eval_stsb_indo": 0.3244277933781677,
"eval_stsb_spearman": 0.6649076427249111,
"step": 260
},
{
"epoch": 1.11,
"eval_CR": 73.01,
"eval_FakeCLSDev": 0.9861030689056167,
"eval_FakeCLSTest": 0.5215189873417722,
"eval_FakeCLSTrain": 0.5498652291105122,
"eval_FakePairs": -0.04281527160708269,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.48,
"eval_MR": 61.83,
"eval_MRPC": 69.9,
"eval_SST2": 69.27,
"eval_SUBJ": 82.46,
"eval_TREC": 63.98,
"eval_avg_sts": 0.6053801581830358,
"eval_avg_transfer": 71.13285714285715,
"eval_sickr_spearman": 0.5599164170338142,
"eval_stsb_indo": 0.3103923440865193,
"eval_stsb_spearman": 0.6508438993322574,
"step": 280
},
{
"epoch": 1.19,
"eval_CR": 72.83,
"eval_FakeCLSDev": 0.9856073690270581,
"eval_FakeCLSTest": 0.516209476309227,
"eval_FakeCLSTrain": 0.5464190981432361,
"eval_FakePairs": -0.020449082697090434,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.52,
"eval_MR": 61.56,
"eval_MRPC": 69.9,
"eval_SST2": 68.23,
"eval_SUBJ": 82.49,
"eval_TREC": 64.05,
"eval_avg_sts": 0.5997509995903676,
"eval_avg_transfer": 70.94000000000001,
"eval_sickr_spearman": 0.5583291812556904,
"eval_stsb_indo": 0.2918589228675973,
"eval_stsb_spearman": 0.6411728179250447,
"step": 300
},
{
"epoch": 1.26,
"eval_CR": 72.67,
"eval_FakeCLSDev": 0.9851936218678815,
"eval_FakeCLSTest": 0.5277777777777778,
"eval_FakeCLSTrain": 0.5513513513513514,
"eval_FakePairs": -0.0066299760306972886,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.09,
"eval_MR": 62.07,
"eval_MRPC": 70.22,
"eval_SST2": 68.0,
"eval_SUBJ": 82.45,
"eval_TREC": 64.58,
"eval_avg_sts": 0.6007609352858567,
"eval_avg_transfer": 71.01142857142857,
"eval_sickr_spearman": 0.5664013842255825,
"eval_stsb_indo": 0.3162610592900211,
"eval_stsb_spearman": 0.6351204863461308,
"step": 320
},
{
"epoch": 1.34,
"eval_CR": 72.99,
"eval_FakeCLSDev": 0.993006993006993,
"eval_FakeCLSTest": 0.30303030303030304,
"eval_FakeCLSTrain": 0.5580110497237569,
"eval_FakePairs": -0.02212654651208613,
"eval_FakePairsNLI": 63.03,
"eval_MPQA": 77.68,
"eval_MR": 62.61,
"eval_MRPC": 69.87,
"eval_SST2": 68.23,
"eval_SUBJ": 83.02,
"eval_TREC": 65.72,
"eval_avg_sts": 0.6010452523806686,
"eval_avg_transfer": 71.44571428571429,
"eval_sickr_spearman": 0.5730101275657566,
"eval_stsb_indo": 0.33175461948179213,
"eval_stsb_spearman": 0.6290803771955807,
"step": 340
},
{
"epoch": 1.42,
"eval_CR": 72.46,
"eval_FakeCLSDev": 1.0,
"eval_FakeCLSTest": 0.0,
"eval_FakeCLSTrain": 0.5689655172413793,
"eval_FakePairs": -0.022206425741371643,
"eval_FakePairsNLI": 60.53,
"eval_MPQA": 77.87,
"eval_MR": 62.74,
"eval_MRPC": 70.34,
"eval_SST2": 68.81,
"eval_SUBJ": 83.4,
"eval_TREC": 66.23,
"eval_avg_sts": 0.6023165873556424,
"eval_avg_transfer": 71.69285714285715,
"eval_sickr_spearman": 0.5723746280626291,
"eval_stsb_indo": 0.3481165947946874,
"eval_stsb_spearman": 0.6322585466486558,
"step": 360
}
],
"max_steps": 759,
"num_train_epochs": 3,
"total_flos": 0,
"trial_name": null,
"trial_params": null
}