francisco-perez-sorrosal committed on
Commit 4cfb57a
1 Parent(s): de42150

End of training

last-checkpoint/config.json DELETED
@@ -1,36 +0,0 @@
- {
-   "_name_or_path": "distilbert-base-multilingual-cased",
-   "activation": "gelu",
-   "architectures": [
-     "DistilBertForSequenceClassification"
-   ],
-   "attention_dropout": 0.1,
-   "dim": 768,
-   "dropout": 0.1,
-   "hidden_dim": 3072,
-   "id2label": {
-     "0": "P",
-     "1": "NEU",
-     "2": "N"
-   },
-   "initializer_range": 0.02,
-   "label2id": {
-     "N": 2,
-     "NEU": 1,
-     "P": 0
-   },
-   "max_position_embeddings": 512,
-   "model_type": "distilbert",
-   "n_heads": 12,
-   "n_layers": 6,
-   "output_past": true,
-   "pad_token_id": 0,
-   "problem_type": "single_label_classification",
-   "qa_dropout": 0.1,
-   "seq_classif_dropout": 0.2,
-   "sinusoidal_pos_embds": false,
-   "tie_weights_": true,
-   "torch_dtype": "float32",
-   "transformers_version": "4.26.0",
-   "vocab_size": 119547
- }
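The deleted config above can be regenerated from the base checkpoint if needed. Below is a minimal sketch with the `transformers` library, using only values that appear in the file (the P/NEU/N label mapping and `distilbert-base-multilingual-cased`); it is illustrative, not part of this repo's training code.

```python
from transformers import AutoConfig, AutoModelForSequenceClassification

# Rebuild an equivalent config from the base model, overriding the label
# mapping used by this classifier (P / NEU / N).
config = AutoConfig.from_pretrained(
    "distilbert-base-multilingual-cased",
    num_labels=3,
    id2label={0: "P", 1: "NEU", 2: "N"},
    label2id={"P": 0, "NEU": 1, "N": 2},
    problem_type="single_label_classification",
)

# Base weights plus a freshly initialized sequence-classification head.
model = AutoModelForSequenceClassification.from_pretrained(
    "distilbert-base-multilingual-cased", config=config
)
print(model.config.id2label)  # {0: 'P', 1: 'NEU', 2: 'N'}
```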
last-checkpoint/optimizer.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:9c9b08eb3bd43ed93dbcb8c2596efae585382d0ffa0bc801326206dea663cef0
- size 1082673861
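Note that the file shown above is only a Git LFS pointer; the ~1 GB optimizer state itself lives in LFS storage. A quick heuristic for telling a pointer apart from the fetched binary after cloning is sketched below; the helper name and path are illustrative only, and this is not the official LFS tooling.

```python
from pathlib import Path

def is_lfs_pointer(path: str) -> bool:
    """Heuristic: LFS pointer files are tiny text files starting with the spec line."""
    p = Path(path)
    if not p.exists() or p.stat().st_size > 1024:
        return False
    lines = p.read_text(errors="ignore").splitlines()
    return bool(lines) and lines[0].startswith("version https://git-lfs.github.com/spec/")

# True for a bare clone without `git lfs pull`, False once the blob is fetched.
print(is_lfs_pointer("last-checkpoint/optimizer.pt"))
```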
last-checkpoint/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:4f0a615ffdab692a6b7cdb171da5d1e966e393872aa466fa1892e126d11858ee
- size 541341997
last-checkpoint/rng_state.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:fa76fe52fd649c3885f94de74fa828974782e09ea5a116494bafe08931499b21
- size 13553
last-checkpoint/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:aa65b58ab89e6113985723068f5721b82cd7855ba30b3811b50b5d914c50e528
- size 627
last-checkpoint/special_tokens_map.json DELETED
@@ -1,7 +0,0 @@
- {
-   "cls_token": "[CLS]",
-   "mask_token": "[MASK]",
-   "pad_token": "[PAD]",
-   "sep_token": "[SEP]",
-   "unk_token": "[UNK]"
- }
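The special tokens map deleted above matches the usual defaults of the base multilingual tokenizer, so it can be reproduced from the Hub. A small sketch, assuming `transformers` is installed:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("distilbert-base-multilingual-cased")
# Should mirror the deleted special_tokens_map.json:
# [CLS], [MASK], [PAD], [SEP], [UNK]
print(tokenizer.special_tokens_map)
```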
last-checkpoint/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
last-checkpoint/tokenizer_config.json DELETED
@@ -1,17 +0,0 @@
- {
-   "cls_token": "[CLS]",
-   "do_lower_case": false,
-   "mask_token": "[MASK]",
-   "max_length": 72,
-   "model_max_length": 512,
-   "name_or_path": "distilbert-base-multilingual-cased",
-   "pad_token": "[PAD]",
-   "padding": "max_length",
-   "sep_token": "[SEP]",
-   "special_tokens_map_file": null,
-   "strip_accents": null,
-   "tokenize_chinese_chars": true,
-   "tokenizer_class": "DistilBertTokenizer",
-   "truncation": true,
-   "unk_token": "[UNK]"
- }
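The tokenizer configuration above pins inputs to at most 72 tokens with `padding="max_length"` and truncation enabled. A sketch of reproducing that preprocessing from the base tokenizer; the example sentence is made up.

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("distilbert-base-multilingual-cased")

# Pad/truncate to 72 tokens, as the deleted tokenizer_config.json specifies.
encoded = tokenizer(
    "Example tweet to classify",  # hypothetical input
    max_length=72,
    padding="max_length",
    truncation=True,
    return_tensors="pt",
)
print(encoded["input_ids"].shape)  # torch.Size([1, 72])
```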
last-checkpoint/trainer_state.json DELETED
@@ -1,70 +0,0 @@
- {
-   "best_metric": 0.6050104105453544,
-   "best_model_checkpoint": "/Users/fperez/dev/data/spanishclassfier_exp/distilbert-base-multilingual-cased-finetuned-with-spanish-tweets-clf-cleaned-ds/ep_4-lr_5e-5-msl_72-bs_8/checkpoint-1629",
-   "epoch": 3.0,
-   "global_step": 1629,
-   "is_hyper_param_search": false,
-   "is_local_process_zero": true,
-   "is_world_process_zero": true,
-   "log_history": [
-     {
-       "epoch": 1.0,
-       "learning_rate": 3.7500000000000003e-05,
-       "loss": 1.018,
-       "step": 543
-     },
-     {
-       "epoch": 1.0,
-       "eval_accuracy": 0.5535590877677954,
-       "eval_f1": 0.4949397774870696,
-       "eval_loss": 0.942139744758606,
-       "eval_precision": 0.5346677326820676,
-       "eval_recall": 0.5146316846159721,
-       "eval_runtime": 80.7708,
-       "eval_samples_per_second": 17.915,
-       "eval_steps_per_second": 0.57,
-       "step": 543
-     },
-     {
-       "epoch": 2.0,
-       "learning_rate": 2.5e-05,
-       "loss": 0.8079,
-       "step": 1086
-     },
-     {
-       "epoch": 2.0,
-       "eval_accuracy": 0.5957152729785764,
-       "eval_f1": 0.575132441321117,
-       "eval_loss": 0.9275487661361694,
-       "eval_precision": 0.5920976186364554,
-       "eval_recall": 0.5724889819336446,
-       "eval_runtime": 41.3201,
-       "eval_samples_per_second": 35.019,
-       "eval_steps_per_second": 1.113,
-       "step": 1086
-     },
-     {
-       "epoch": 3.0,
-       "learning_rate": 1.25e-05,
-       "loss": 0.521,
-       "step": 1629
-     },
-     {
-       "epoch": 3.0,
-       "eval_accuracy": 0.6033172080165861,
-       "eval_f1": 0.6050104105453544,
-       "eval_loss": 1.1208082437515259,
-       "eval_precision": 0.6145903381180479,
-       "eval_recall": 0.602289094420588,
-       "eval_runtime": 46.7494,
-       "eval_samples_per_second": 30.952,
-       "eval_steps_per_second": 0.984,
-       "step": 1629
-     }
-   ],
-   "max_steps": 2172,
-   "num_train_epochs": 4,
-   "total_flos": 242543853074880.0,
-   "trial_name": null,
-   "trial_params": null
- }
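The trainer state above records per-epoch evaluation metrics, with the best eval_f1 (≈0.605) reached at step 1629. A short sketch of extracting those rows from a `trainer_state.json` like this one; the local path is only an example.

```python
import json

# Pull the evaluation entries out of a trainer_state.json like the one above.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

eval_rows = [e for e in state["log_history"] if "eval_f1" in e]
for row in eval_rows:
    print(f"epoch {row['epoch']:.0f}: f1={row['eval_f1']:.4f} "
          f"acc={row['eval_accuracy']:.4f} loss={row['eval_loss']:.4f}")

best = max(eval_rows, key=lambda e: e["eval_f1"])
print("best eval_f1:", best["eval_f1"], "at step", best["step"])
```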
last-checkpoint/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:547db2c34b063e23ee11f17e4bff5def40cf9fa9cf46e3e436df969a7238cc1c
- size 3835
last-checkpoint/vocab.txt DELETED
The diff for this file is too large to render. See raw diff