juliamendelsohn committed
Commit
7b39b10
1 Parent(s): 0cd14fc

commit from juliame

config.json ADDED
@@ -0,0 +1,54 @@
+ {
+   "architectures": [
+     "RobertaForMultiLabelSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "eos_token_id": 2,
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2",
+     "3": "LABEL_3",
+     "4": "LABEL_4",
+     "5": "LABEL_5",
+     "6": "LABEL_6",
+     "7": "LABEL_7",
+     "8": "LABEL_8",
+     "9": "LABEL_9",
+     "10": "LABEL_10",
+     "11": "LABEL_11",
+     "12": "LABEL_12",
+     "13": "LABEL_13"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_10": 10,
+     "LABEL_11": 11,
+     "LABEL_12": 12,
+     "LABEL_13": 13,
+     "LABEL_2": 2,
+     "LABEL_3": 3,
+     "LABEL_4": 4,
+     "LABEL_5": 5,
+     "LABEL_6": 6,
+     "LABEL_7": 7,
+     "LABEL_8": 8,
+     "LABEL_9": 9
+   },
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 514,
+   "model_type": "roberta",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 1,
+   "type_vocab_size": 1,
+   "vocab_size": 50265
+ }
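
config.json describes a roberta-base-sized encoder (12 hidden layers, 12 attention heads, hidden size 768) with a 14-way label space; the `RobertaForMultiLabelSequenceClassification` architecture name appears to come from simpletransformers' multi-label head rather than from stock transformers. A minimal sketch of inspecting the config with transformers (`model_dir` is a hypothetical local clone of this repo):

```python
from transformers import RobertaConfig

# A minimal sketch; "model_dir" is a hypothetical local clone of this repo.
config = RobertaConfig.from_json_file("model_dir/config.json")

print(config.model_type)   # "roberta"
print(config.num_labels)   # 14 (LABEL_0 .. LABEL_13)
print(config.hidden_size)  # 768, i.e. roberta-base geometry
```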
eval_results.txt ADDED
@@ -0,0 +1,5 @@
+ LRAP = 0.892502622685955
+ eval_loss = 0.23466304304045543
+ macro_f1 = 0.7259145332742358
+ micro_f1 = 0.7786259541984734
+ weighted_f1 = 0.7748483424794298
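
LRAP (label ranking average precision) and the three F1 averages are standard multi-label metrics available in scikit-learn. A minimal sketch of how they are typically computed, with toy arrays (the real eval set is not part of this commit) and the 0.5 decision threshold recorded in model_args.json below:

```python
import numpy as np
from sklearn.metrics import f1_score, label_ranking_average_precision_score

# Toy arrays for illustration only; the real eval data is not in this repo.
y_true = np.array([[1, 0, 1], [0, 1, 0]])               # binary label matrix
y_score = np.array([[0.9, 0.2, 0.6], [0.1, 0.8, 0.3]])  # sigmoid outputs
y_pred = (y_score >= 0.5).astype(int)                   # default threshold 0.5

print("LRAP =", label_ranking_average_precision_score(y_true, y_score))
print("macro_f1 =", f1_score(y_true, y_pred, average="macro"))
print("micro_f1 =", f1_score(y_true, y_pred, average="micro"))
print("weighted_f1 =", f1_score(y_true, y_pred, average="weighted"))
```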
merges.txt ADDED
The diff for this file is too large to render.
model_args.json ADDED
@@ -0,0 +1 @@
+ {"adam_epsilon": 1e-08, "best_model_dir": "/shared/2/projects/framing/models/classify/Issue-General/11-03-20_60_epochs_default_thresh_12_seed/best_model", "cache_dir": "cache_dir/", "config": {}, "custom_layer_parameters": [], "custom_parameter_groups": [], "dataloader_num_workers": 14, "do_lower_case": false, "dynamic_quantize": false, "early_stopping_consider_epochs": false, "early_stopping_delta": 0, "early_stopping_metric": "eval_loss", "early_stopping_metric_minimize": true, "early_stopping_patience": 20, "encoding": null, "eval_batch_size": 8, "evaluate_during_training": true, "evaluate_during_training_silent": true, "evaluate_during_training_steps": 100, "evaluate_during_training_verbose": false, "fp16": false, "gradient_accumulation_steps": 1, "learning_rate": 4e-05, "local_rank": -1, "logging_steps": 50, "manual_seed": 12, "max_grad_norm": 1.0, "max_seq_length": 128, "model_name": "/shared/2/projects/framing/models/finetune/roberta_cased_09-01-20", "model_type": "roberta", "multiprocessing_chunksize": 500, "n_gpu": 1, "no_cache": false, "no_save": false, "num_train_epochs": 60, "output_dir": "/shared/2/projects/framing/models/classify/Issue-General/11-03-20_60_epochs_default_thresh_12_seed", "overwrite_output_dir": true, "process_count": 14, "quantized_model": false, "reprocess_input_data": true, "save_best_model": true, "save_eval_checkpoints": true, "save_model_every_epoch": true, "save_optimizer_and_scheduler": true, "save_steps": 2000, "silent": false, "tensorboard_dir": null, "thread_count": null, "train_batch_size": 8, "train_custom_parameters_only": false, "use_cached_eval_features": false, "use_early_stopping": true, "use_multiprocessing": true, "wandb_kwargs": {}, "wandb_project": null, "warmup_ratio": 0.06, "warmup_steps": 1620, "weight_decay": 0, "model_class": "MultiLabelClassificationModel", "sliding_window": false, "stride": 0.8, "threshold": 0.5, "tie_value": 1, "labels_list": [], "labels_map": {}, "lazy_loading": false}
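
model_args.json records that this checkpoint was trained as a simpletransformers `MultiLabelClassificationModel` (model_type "roberta", 60 epochs, seed 12, max_seq_length 128, early stopping on eval_loss). A minimal sketch of loading it for inference, assuming the repo has been downloaded to a hypothetical `model_dir`:

```python
from simpletransformers.classification import MultiLabelClassificationModel

# A minimal sketch; "model_dir" is a hypothetical local download of this repo.
model = MultiLabelClassificationModel(
    "roberta",       # model_type, as recorded in model_args.json
    "model_dir",     # directory with config.json and pytorch_model.bin
    num_labels=14,
    use_cuda=False,  # set True if a GPU is available
)

# predict() applies the 0.5 threshold from model_args.json to the sigmoid
# outputs and returns one 14-dim binary vector per input text.
predictions, raw_outputs = model.predict(["example input text"])
print(predictions[0])
```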
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70089174cbee8041e49f9d3ece74e5ea021afb7b4aeaec0a7507b8c314e3fda6
+ size 501079387
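
pytorch_model.bin is stored as a Git LFS pointer: a spec version line, the object's sha256 oid, and its size in bytes. A minimal sketch of verifying a downloaded copy against the recorded oid (the local path is hypothetical):

```python
import hashlib

# sha256 oid copied from the LFS pointer above; the path is hypothetical.
EXPECTED_OID = "70089174cbee8041e49f9d3ece74e5ea021afb7b4aeaec0a7507b8c314e3fda6"

h = hashlib.sha256()
with open("model_dir/pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == EXPECTED_OID, "checksum mismatch"
print("checksum OK")
```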
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"model_max_length": 512, "do_lower_case": false, "special_tokens_map_file": "/shared/2/projects/framing/models/finetune/roberta_cased_09-01-20/special_tokens_map.json", "full_tokenizer_file": null}
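
Together with vocab.json and merges.txt, these two files are what the RoBERTa byte-level BPE tokenizer reads; model_max_length caps inputs at 512 positions (consistent with the encoder's max_position_embeddings of 514, since RoBERTa reserves two positions). A minimal sketch of loading it, again with a hypothetical `model_dir`:

```python
from transformers import RobertaTokenizer

# A minimal sketch; "model_dir" is a hypothetical local download of this repo.
tokenizer = RobertaTokenizer.from_pretrained("model_dir")

enc = tokenizer("example input text", truncation=True, max_length=512)
print(tokenizer.convert_ids_to_tokens(enc["input_ids"]))  # <s> ... </s>
```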
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fbc00ec97d4a705363e14a053018704748bfeb57c4c810e3d22377c28fff7d4a
+ size 2479
training_progress_scores.csv ADDED
@@ -0,0 +1,49 @@
+ global_step,LRAP,train_loss,eval_loss,macro_f1,weighted_f1,micro_f1
+ 100,0.49517042217042123,0.5851673483848572,0.5588188819717943,0.04664936332050546,0.07607174724774828,0.09222886421861655
+ 200,0.5208749660832989,0.3791176378726959,0.4263942408979985,0.0,0.0,0.0
+ 300,0.546149528352306,0.3298804461956024,0.40748437297971624,0.0,0.0,0.0
+ 400,0.5859014920264919,0.4834856390953064,0.39078071295169364,0.01426990900675111,0.03010161960796043,0.03242147922998987
+ 450,0.6433627339738447,0.34385839104652405,0.37329806727275516,0.05500466698052199,0.09953513274618292,0.11753371868978806
+ 500,0.6844985952319285,0.37006911635398865,0.35097177117540124,0.16556906139520172,0.26832739843375136,0.3405275779376499
+ 600,0.7469535731757956,0.2587157189846039,0.31583157600018014,0.24118228676963346,0.3718108162835448,0.4558823529411764
+ 700,0.8039844727289174,0.33606284856796265,0.29391343264203323,0.4299181858521028,0.5440843103326621,0.5931558935361216
+ 800,0.8340183593155817,0.21565081179141998,0.2683152362965701,0.49214921245328347,0.5950782286794557,0.6223337515683813
+ 900,0.8508108316580539,0.2793205678462982,0.2514987370946951,0.43623627076629606,0.5593830429447101,0.6122448979591836
+ 900,0.8508108316580539,0.2793205678462982,0.2514987370946951,0.43623627076629606,0.5593830429447101,0.6122448979591836
+ 1000,0.8612577934411264,0.18915380537509918,0.2369406923913119,0.5668846072806962,0.6614025039563474,0.6906729634002362
+ 1100,0.872423978387867,0.18642736971378326,0.23017527934229165,0.6093686818206121,0.6940764468499311,0.717065036131184
+ 1200,0.8698748014331339,0.18087641894817352,0.22494757463011825,0.5882897671045619,0.6749945891129,0.7018160515524312
+ 1300,0.8753145406034295,0.1971210390329361,0.22193141111679243,0.5977629259333298,0.6771882595305366,0.702280912364946
+ 1350,0.8751003997031774,0.19553999602794647,0.2149686907467089,0.6255744233435693,0.7077209423588136,0.7286036036036037
+ 1400,0.8796602117018784,0.13111352920532227,0.21083084075597294,0.6406101471002029,0.7109059385478351,0.7337807606263983
+ 1500,0.8842798972632305,0.17609044909477234,0.20430217356535427,0.6535181233240352,0.7231424815634989,0.7376509330406147
+ 1600,0.8779117869784537,0.0680193156003952,0.20967365932046322,0.6358362526598731,0.7099013365278147,0.7309417040358744
+ 1700,0.8837815701787918,0.11457131803035736,0.20939396413271888,0.6451717397630224,0.7212019843370354,0.7398593834505138
+ 1800,0.8896327820327814,0.10776479542255402,0.2009195393899031,0.6826068003133404,0.7383907878018762,0.7495943753380205
+ 1800,0.8896327820327814,0.10776479542255402,0.2009195393899031,0.6826068003133404,0.7383907878018762,0.7495943753380205
+ 1900,0.8880346813680149,0.07660847157239914,0.1985036639267938,0.6667298471511679,0.7360738293251483,0.7551589514779699
+ 2000,0.8898697817614483,0.03555149957537651,0.20077398968370339,0.6734777589313347,0.7384601584569371,0.7523650528658876
+ 2100,0.8867521182521182,0.051547933369874954,0.20762131912143608,0.6776247285843823,0.7406506936409527,0.7529162248144221
+ 2200,0.8915603279436612,0.04129895567893982,0.21144162445214756,0.6837385940571574,0.7446417044323497,0.7556968733439322
+ 2250,0.8895807871141201,0.03669261932373047,0.2066752160981036,0.6919494789050521,0.7497284605897521,0.7569847127042699
+ 2300,0.8926034817034814,0.03504105657339096,0.20268693683963074,0.7000891505587007,0.7504258243161078,0.7577374599786553
+ 2400,0.8901966607466606,0.04220922663807869,0.2132203475032982,0.6798103290735533,0.7394468188542488,0.7542735042735043
+ 2500,0.886735957252624,0.036895960569381714,0.215842880504696,0.7037931207879496,0.7588472993114846,0.7625452664252457
+ 2600,0.8936432117265444,0.03052089549601078,0.2133913676728282,0.7031162862658876,0.7590947624402573,0.7662473794549267
+ 2700,0.8931694984028312,0.027315404266119003,0.22733164447964282,0.699525568534146,0.7568567663206978,0.7617586912065439
+ 2700,0.8931694984028312,0.027315404266119003,0.22733164447964282,0.699525568534146,0.7568567663206978,0.7617586912065439
+ 2800,0.8956396282729616,0.01632537506520748,0.21437923780135942,0.7072687251188666,0.7644864645985755,0.7687564234326825
+ 2900,0.8921067833401163,0.03892175853252411,0.21228947967552303,0.713019241986434,0.7673169166971197,0.7711340206185566
+ 3000,0.8995346431346426,0.028772462159395218,0.2094113152278097,0.7131416721336642,0.7697478263586978,0.7772020725388601
+ 3100,0.8970715691715686,0.018464110791683197,0.21741718583201108,0.7062959132606847,0.7647064936744621,0.771604938271605
+ 3150,0.8980190494690494,0.019953792914748192,0.22124001017788,0.7008181415650473,0.7609972043967228,0.7692307692307693
+ 3200,0.9003905341572009,0.012464622966945171,0.2197075594674077,0.7053962523779961,0.7632667565465354,0.7700258397932817
+ 3300,0.8959309159975827,0.01991826854646206,0.2258751556407987,0.695914883804425,0.7607497840160904,0.7698616094310609
+ 3400,0.9003791430791428,0.008648190647363663,0.22840365087776854,0.7163119146203822,0.7726790174163348,0.7790756729304216
+ 3500,0.8974578908745572,0.011495017446577549,0.22338530996389555,0.6922482838520437,0.7561315726300972,0.7670514165792235
+ 3600,0.8977866466866463,0.0095455851405859,0.23200650761524835,0.7154136088186338,0.7662748138534528,0.7721968543886352
+ 3600,0.8977866466866463,0.0095455851405859,0.23200650761524835,0.7154136088186338,0.7662748138534528,0.7721968543886352
+ 3700,0.9000112871696201,0.006714036222547293,0.23010387506924176,0.7109124291106568,0.7640663027468645,0.7708867247565352
+ 3800,0.8936887550720873,0.005543881095945835,0.2377601506417258,0.7083361687337952,0.7609088471937488,0.7680168332456602
+ 3900,0.8994065514732178,0.008245917037129402,0.23087657765861144,0.7145634473994218,0.7697872119381398,0.7759506680369991
+ 4000,0.892502622685955,0.010644347406923771,0.23466304304045543,0.7259145332742358,0.7748483424794298,0.7786259541984734
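
Since model_args.json sets early_stopping_metric to eval_loss (minimized), the saved best_model directory should correspond to the lowest-eval_loss row of this log rather than to the final step. A minimal sketch of recovering that row with pandas (the CSV path is hypothetical):

```python
import pandas as pd

# A minimal sketch; the path is hypothetical.
scores = pd.read_csv("model_dir/training_progress_scores.csv")

# model_args.json minimizes eval_loss, so this row is the saved best_model.
best = scores.loc[scores["eval_loss"].idxmin()]
print(int(best["global_step"]), float(best["eval_loss"]), float(best["macro_f1"]))
```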
vocab.json ADDED
The diff for this file is too large to render.