mrm8488 committed on
Commit
82e26c9
1 Parent(s): 9059152

Initial commit from mrm8488

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. checkpoint-1020/config.json +0 -39
  2. checkpoint-1020/merges.txt +0 -0
  3. checkpoint-1020/optimizer.pt +0 -3
  4. checkpoint-1020/pytorch_model.bin +0 -3
  5. checkpoint-1020/rng_state.pth +0 -3
  6. checkpoint-1020/scheduler.pt +0 -3
  7. checkpoint-1020/special_tokens_map.json +0 -1
  8. checkpoint-1020/tokenizer.json +0 -0
  9. checkpoint-1020/tokenizer_config.json +0 -1
  10. checkpoint-1020/trainer_state.json +0 -64
  11. checkpoint-1020/training_args.bin +0 -3
  12. checkpoint-1020/vocab.json +0 -0
  13. checkpoint-1275/config.json +0 -39
  14. checkpoint-1275/merges.txt +0 -0
  15. checkpoint-1275/optimizer.pt +0 -3
  16. checkpoint-1275/pytorch_model.bin +0 -3
  17. checkpoint-1275/rng_state.pth +0 -3
  18. checkpoint-1275/scheduler.pt +0 -3
  19. checkpoint-1275/special_tokens_map.json +0 -1
  20. checkpoint-1275/tokenizer.json +0 -0
  21. checkpoint-1275/tokenizer_config.json +0 -1
  22. checkpoint-1275/trainer_state.json +0 -73
  23. checkpoint-1275/training_args.bin +0 -3
  24. checkpoint-1275/vocab.json +0 -0
  25. checkpoint-255/config.json +0 -39
  26. checkpoint-255/merges.txt +0 -0
  27. checkpoint-255/optimizer.pt +0 -3
  28. checkpoint-255/pytorch_model.bin +0 -3
  29. checkpoint-255/rng_state.pth +0 -3
  30. checkpoint-255/scheduler.pt +0 -3
  31. checkpoint-255/special_tokens_map.json +0 -1
  32. checkpoint-255/tokenizer.json +0 -0
  33. checkpoint-255/tokenizer_config.json +0 -1
  34. checkpoint-255/trainer_state.json +0 -25
  35. checkpoint-255/training_args.bin +0 -3
  36. checkpoint-255/vocab.json +0 -0
  37. checkpoint-510/config.json +0 -39
  38. checkpoint-510/merges.txt +0 -0
  39. checkpoint-510/optimizer.pt +0 -3
  40. checkpoint-510/pytorch_model.bin +0 -3
  41. checkpoint-510/rng_state.pth +0 -3
  42. checkpoint-510/scheduler.pt +0 -3
  43. checkpoint-510/special_tokens_map.json +0 -1
  44. checkpoint-510/tokenizer.json +0 -0
  45. checkpoint-510/tokenizer_config.json +0 -1
  46. checkpoint-510/trainer_state.json +0 -40
  47. checkpoint-510/training_args.bin +0 -3
  48. checkpoint-510/vocab.json +0 -0
  49. checkpoint-765/config.json +0 -39
  50. checkpoint-765/merges.txt +0 -0
checkpoint-1020/config.json DELETED
@@ -1,39 +0,0 @@
1
- {
2
- "_name_or_path": "distilroberta-base",
3
- "architectures": [
4
- "RobertaForSequenceClassification"
5
- ],
6
- "attention_probs_dropout_prob": 0.1,
7
- "bos_token_id": 0,
8
- "classifier_dropout": null,
9
- "eos_token_id": 2,
10
- "gradient_checkpointing": false,
11
- "hidden_act": "gelu",
12
- "hidden_dropout_prob": 0.1,
13
- "hidden_size": 768,
14
- "id2label": {
15
- "0": "LABEL_0",
16
- "1": "LABEL_1",
17
- "2": "LABEL_2"
18
- },
19
- "initializer_range": 0.02,
20
- "intermediate_size": 3072,
21
- "label2id": {
22
- "LABEL_0": 0,
23
- "LABEL_1": 1,
24
- "LABEL_2": 2
25
- },
26
- "layer_norm_eps": 1e-05,
27
- "max_position_embeddings": 514,
28
- "model_type": "roberta",
29
- "num_attention_heads": 12,
30
- "num_hidden_layers": 6,
31
- "pad_token_id": 1,
32
- "position_embedding_type": "absolute",
33
- "problem_type": "single_label_classification",
34
- "torch_dtype": "float32",
35
- "transformers_version": "4.10.2",
36
- "type_vocab_size": 1,
37
- "use_cache": true,
38
- "vocab_size": 50265
39
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-1020/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-1020/optimizer.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:3a10d139e099d856113f8f9bec12c657e6d97106685112c3f8f9090244f21ac6
3
- size 657026205
 
 
 
 
checkpoint-1020/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:c6d24cd7c45f0b65241fd9ff1aa97814eea3ab7bdbf1458248fb9f4b2c817864
3
- size 328529005
 
 
 
 
checkpoint-1020/rng_state.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:133c6f14a48205cfdb6ebb24164c59ba141f1b3bb6282c1a0f245e16e419918e
3
- size 14503
 
 
 
 
checkpoint-1020/scheduler.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:5b5fb5b60622a213ada574e87f31d8d1d9c80a4bff68598fca269c91968c006c
3
- size 623
 
 
 
 
checkpoint-1020/special_tokens_map.json DELETED
@@ -1 +0,0 @@
1
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
 
 
checkpoint-1020/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-1020/tokenizer_config.json DELETED
@@ -1 +0,0 @@
1
- {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "distilroberta-base", "tokenizer_class": "RobertaTokenizer"}
 
 
checkpoint-1020/trainer_state.json DELETED
@@ -1,64 +0,0 @@
1
- {
2
- "best_metric": 0.9823008849557522,
3
- "best_model_checkpoint": "/content/drive/MyDrive/distilRoberta-financial-sentiment/checkpoint-1020",
4
- "epoch": 4.0,
5
- "global_step": 1020,
6
- "is_hyper_param_search": false,
7
- "is_local_process_zero": true,
8
- "is_world_process_zero": true,
9
- "log_history": [
10
- {
11
- "epoch": 1.0,
12
- "eval_accuracy": 0.9646017699115044,
13
- "eval_loss": 0.16703279316425323,
14
- "eval_runtime": 0.4253,
15
- "eval_samples_per_second": 531.427,
16
- "eval_steps_per_second": 68.192,
17
- "step": 255
18
- },
19
- {
20
- "epoch": 1.96,
21
- "learning_rate": 1.215686274509804e-05,
22
- "loss": 0.209,
23
- "step": 500
24
- },
25
- {
26
- "epoch": 2.0,
27
- "eval_accuracy": 0.9557522123893806,
28
- "eval_loss": 0.22898824512958527,
29
- "eval_runtime": 0.4057,
30
- "eval_samples_per_second": 557.077,
31
- "eval_steps_per_second": 71.483,
32
- "step": 510
33
- },
34
- {
35
- "epoch": 3.0,
36
- "eval_accuracy": 0.9557522123893806,
37
- "eval_loss": 0.20438142120838165,
38
- "eval_runtime": 0.4213,
39
- "eval_samples_per_second": 536.442,
40
- "eval_steps_per_second": 68.835,
41
- "step": 765
42
- },
43
- {
44
- "epoch": 3.92,
45
- "learning_rate": 4.313725490196079e-06,
46
- "loss": 0.0326,
47
- "step": 1000
48
- },
49
- {
50
- "epoch": 4.0,
51
- "eval_accuracy": 0.9823008849557522,
52
- "eval_loss": 0.11158797889947891,
53
- "eval_runtime": 0.4245,
54
- "eval_samples_per_second": 532.332,
55
- "eval_steps_per_second": 68.308,
56
- "step": 1020
57
- }
58
- ],
59
- "max_steps": 1275,
60
- "num_train_epochs": 5,
61
- "total_flos": 109104889463388.0,
62
- "trial_name": null,
63
- "trial_params": null
64
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-1020/training_args.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:ee1178219233a39de3467c1d1c9ad2fd1d976e51b9ed6bb5a459131607445eaf
3
- size 2735
 
 
 
 
checkpoint-1020/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-1275/config.json DELETED
@@ -1,39 +0,0 @@
1
- {
2
- "_name_or_path": "distilroberta-base",
3
- "architectures": [
4
- "RobertaForSequenceClassification"
5
- ],
6
- "attention_probs_dropout_prob": 0.1,
7
- "bos_token_id": 0,
8
- "classifier_dropout": null,
9
- "eos_token_id": 2,
10
- "gradient_checkpointing": false,
11
- "hidden_act": "gelu",
12
- "hidden_dropout_prob": 0.1,
13
- "hidden_size": 768,
14
- "id2label": {
15
- "0": "LABEL_0",
16
- "1": "LABEL_1",
17
- "2": "LABEL_2"
18
- },
19
- "initializer_range": 0.02,
20
- "intermediate_size": 3072,
21
- "label2id": {
22
- "LABEL_0": 0,
23
- "LABEL_1": 1,
24
- "LABEL_2": 2
25
- },
26
- "layer_norm_eps": 1e-05,
27
- "max_position_embeddings": 514,
28
- "model_type": "roberta",
29
- "num_attention_heads": 12,
30
- "num_hidden_layers": 6,
31
- "pad_token_id": 1,
32
- "position_embedding_type": "absolute",
33
- "problem_type": "single_label_classification",
34
- "torch_dtype": "float32",
35
- "transformers_version": "4.10.2",
36
- "type_vocab_size": 1,
37
- "use_cache": true,
38
- "vocab_size": 50265
39
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-1275/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-1275/optimizer.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:75fcbedc38d2c17d18ac54e29a3a67ef140ebce4c29e1c97100eb85ffa994859
3
- size 657026205
 
 
 
 
checkpoint-1275/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:f8c9cd771ae142b084f337b48bc6d3ef08344519e7e8b800d65d7581ffd1f6dc
3
- size 328529005
 
 
 
 
checkpoint-1275/rng_state.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:60e92978b58e882b21cd2cb09760e0deb38b0133cae445d92efa6a28baa781a7
3
- size 14503
 
 
 
 
checkpoint-1275/scheduler.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:c0b3ce3a7534d3726f9e692752affc1b48524ec0f784701f84651de3aca1e0f0
3
- size 623
 
 
 
 
checkpoint-1275/special_tokens_map.json DELETED
@@ -1 +0,0 @@
1
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
 
 
checkpoint-1275/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-1275/tokenizer_config.json DELETED
@@ -1 +0,0 @@
1
- {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "distilroberta-base", "tokenizer_class": "RobertaTokenizer"}
 
 
checkpoint-1275/trainer_state.json DELETED
@@ -1,73 +0,0 @@
1
- {
2
- "best_metric": 0.9823008849557522,
3
- "best_model_checkpoint": "/content/drive/MyDrive/distilRoberta-financial-sentiment/checkpoint-1020",
4
- "epoch": 5.0,
5
- "global_step": 1275,
6
- "is_hyper_param_search": false,
7
- "is_local_process_zero": true,
8
- "is_world_process_zero": true,
9
- "log_history": [
10
- {
11
- "epoch": 1.0,
12
- "eval_accuracy": 0.9646017699115044,
13
- "eval_loss": 0.16703279316425323,
14
- "eval_runtime": 0.4253,
15
- "eval_samples_per_second": 531.427,
16
- "eval_steps_per_second": 68.192,
17
- "step": 255
18
- },
19
- {
20
- "epoch": 1.96,
21
- "learning_rate": 1.215686274509804e-05,
22
- "loss": 0.209,
23
- "step": 500
24
- },
25
- {
26
- "epoch": 2.0,
27
- "eval_accuracy": 0.9557522123893806,
28
- "eval_loss": 0.22898824512958527,
29
- "eval_runtime": 0.4057,
30
- "eval_samples_per_second": 557.077,
31
- "eval_steps_per_second": 71.483,
32
- "step": 510
33
- },
34
- {
35
- "epoch": 3.0,
36
- "eval_accuracy": 0.9557522123893806,
37
- "eval_loss": 0.20438142120838165,
38
- "eval_runtime": 0.4213,
39
- "eval_samples_per_second": 536.442,
40
- "eval_steps_per_second": 68.835,
41
- "step": 765
42
- },
43
- {
44
- "epoch": 3.92,
45
- "learning_rate": 4.313725490196079e-06,
46
- "loss": 0.0326,
47
- "step": 1000
48
- },
49
- {
50
- "epoch": 4.0,
51
- "eval_accuracy": 0.9823008849557522,
52
- "eval_loss": 0.11158797889947891,
53
- "eval_runtime": 0.4245,
54
- "eval_samples_per_second": 532.332,
55
- "eval_steps_per_second": 68.308,
56
- "step": 1020
57
- },
58
- {
59
- "epoch": 5.0,
60
- "eval_accuracy": 0.9778761061946902,
61
- "eval_loss": 0.11265852302312851,
62
- "eval_runtime": 0.431,
63
- "eval_samples_per_second": 524.402,
64
- "eval_steps_per_second": 67.291,
65
- "step": 1275
66
- }
67
- ],
68
- "max_steps": 1275,
69
- "num_train_epochs": 5,
70
- "total_flos": 136563387115644.0,
71
- "trial_name": null,
72
- "trial_params": null
73
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-1275/training_args.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:ee1178219233a39de3467c1d1c9ad2fd1d976e51b9ed6bb5a459131607445eaf
3
- size 2735
 
 
 
 
checkpoint-1275/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-255/config.json DELETED
@@ -1,39 +0,0 @@
1
- {
2
- "_name_or_path": "distilroberta-base",
3
- "architectures": [
4
- "RobertaForSequenceClassification"
5
- ],
6
- "attention_probs_dropout_prob": 0.1,
7
- "bos_token_id": 0,
8
- "classifier_dropout": null,
9
- "eos_token_id": 2,
10
- "gradient_checkpointing": false,
11
- "hidden_act": "gelu",
12
- "hidden_dropout_prob": 0.1,
13
- "hidden_size": 768,
14
- "id2label": {
15
- "0": "LABEL_0",
16
- "1": "LABEL_1",
17
- "2": "LABEL_2"
18
- },
19
- "initializer_range": 0.02,
20
- "intermediate_size": 3072,
21
- "label2id": {
22
- "LABEL_0": 0,
23
- "LABEL_1": 1,
24
- "LABEL_2": 2
25
- },
26
- "layer_norm_eps": 1e-05,
27
- "max_position_embeddings": 514,
28
- "model_type": "roberta",
29
- "num_attention_heads": 12,
30
- "num_hidden_layers": 6,
31
- "pad_token_id": 1,
32
- "position_embedding_type": "absolute",
33
- "problem_type": "single_label_classification",
34
- "torch_dtype": "float32",
35
- "transformers_version": "4.10.2",
36
- "type_vocab_size": 1,
37
- "use_cache": true,
38
- "vocab_size": 50265
39
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-255/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-255/optimizer.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:975b1ebc8d6a6c90134f55202316d19b3e80107d206c614021d293fd65c24106
3
- size 657026077
 
 
 
 
checkpoint-255/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:b4b733235868bb0ec23026fbf9503e44701a09e253c725f4e3c48d9010d9119b
3
- size 328529005
 
 
 
 
checkpoint-255/rng_state.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:8a3a992b385bfed8a7947f3349b47e31d3a41012ea7e604ba8930022525ee395
3
- size 14503
 
 
 
 
checkpoint-255/scheduler.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:bf9b70b1312ed602ec4c2ce9c481f61d1ea9d352599c28f99624d803ef1abbb2
3
- size 623
 
 
 
 
checkpoint-255/special_tokens_map.json DELETED
@@ -1 +0,0 @@
1
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
 
 
checkpoint-255/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-255/tokenizer_config.json DELETED
@@ -1 +0,0 @@
1
- {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "distilroberta-base", "tokenizer_class": "RobertaTokenizer"}
 
 
checkpoint-255/trainer_state.json DELETED
@@ -1,25 +0,0 @@
1
- {
2
- "best_metric": 0.9646017699115044,
3
- "best_model_checkpoint": "/content/drive/MyDrive/distilRoberta-financial-sentiment/checkpoint-255",
4
- "epoch": 1.0,
5
- "global_step": 255,
6
- "is_hyper_param_search": false,
7
- "is_local_process_zero": true,
8
- "is_world_process_zero": true,
9
- "log_history": [
10
- {
11
- "epoch": 1.0,
12
- "eval_accuracy": 0.9646017699115044,
13
- "eval_loss": 0.16703279316425323,
14
- "eval_runtime": 0.4253,
15
- "eval_samples_per_second": 531.427,
16
- "eval_steps_per_second": 68.192,
17
- "step": 255
18
- }
19
- ],
20
- "max_steps": 1275,
21
- "num_train_epochs": 5,
22
- "total_flos": 27420205611960.0,
23
- "trial_name": null,
24
- "trial_params": null
25
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-255/training_args.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:ee1178219233a39de3467c1d1c9ad2fd1d976e51b9ed6bb5a459131607445eaf
3
- size 2735
 
 
 
 
checkpoint-255/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-510/config.json DELETED
@@ -1,39 +0,0 @@
1
- {
2
- "_name_or_path": "distilroberta-base",
3
- "architectures": [
4
- "RobertaForSequenceClassification"
5
- ],
6
- "attention_probs_dropout_prob": 0.1,
7
- "bos_token_id": 0,
8
- "classifier_dropout": null,
9
- "eos_token_id": 2,
10
- "gradient_checkpointing": false,
11
- "hidden_act": "gelu",
12
- "hidden_dropout_prob": 0.1,
13
- "hidden_size": 768,
14
- "id2label": {
15
- "0": "LABEL_0",
16
- "1": "LABEL_1",
17
- "2": "LABEL_2"
18
- },
19
- "initializer_range": 0.02,
20
- "intermediate_size": 3072,
21
- "label2id": {
22
- "LABEL_0": 0,
23
- "LABEL_1": 1,
24
- "LABEL_2": 2
25
- },
26
- "layer_norm_eps": 1e-05,
27
- "max_position_embeddings": 514,
28
- "model_type": "roberta",
29
- "num_attention_heads": 12,
30
- "num_hidden_layers": 6,
31
- "pad_token_id": 1,
32
- "position_embedding_type": "absolute",
33
- "problem_type": "single_label_classification",
34
- "torch_dtype": "float32",
35
- "transformers_version": "4.10.2",
36
- "type_vocab_size": 1,
37
- "use_cache": true,
38
- "vocab_size": 50265
39
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-510/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-510/optimizer.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:8d8bcdb6b232b75268539ac908094b87c2485cf9f72e43a602596e731db2ac2a
3
- size 657026205
 
 
 
 
checkpoint-510/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:6d037b8e1adbc44aa7ac023e87cbc070f9e38fbff4760759f77d7908d06af361
3
- size 328529005
 
 
 
 
checkpoint-510/rng_state.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:48a061e86b42b51652875e3d2f31f7c4fa9b52250106d33553f74eb06c82ac04
3
- size 14503
 
 
 
 
checkpoint-510/scheduler.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:fcc0a142708d96432e37e864dacf92fbffed7431324f0234396de039f88c9435
3
- size 623
 
 
 
 
checkpoint-510/special_tokens_map.json DELETED
@@ -1 +0,0 @@
1
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
 
 
checkpoint-510/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-510/tokenizer_config.json DELETED
@@ -1 +0,0 @@
1
- {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "distilroberta-base", "tokenizer_class": "RobertaTokenizer"}
 
 
checkpoint-510/trainer_state.json DELETED
@@ -1,40 +0,0 @@
1
- {
2
- "best_metric": 0.9646017699115044,
3
- "best_model_checkpoint": "/content/drive/MyDrive/distilRoberta-financial-sentiment/checkpoint-255",
4
- "epoch": 2.0,
5
- "global_step": 510,
6
- "is_hyper_param_search": false,
7
- "is_local_process_zero": true,
8
- "is_world_process_zero": true,
9
- "log_history": [
10
- {
11
- "epoch": 1.0,
12
- "eval_accuracy": 0.9646017699115044,
13
- "eval_loss": 0.16703279316425323,
14
- "eval_runtime": 0.4253,
15
- "eval_samples_per_second": 531.427,
16
- "eval_steps_per_second": 68.192,
17
- "step": 255
18
- },
19
- {
20
- "epoch": 1.96,
21
- "learning_rate": 1.215686274509804e-05,
22
- "loss": 0.209,
23
- "step": 500
24
- },
25
- {
26
- "epoch": 2.0,
27
- "eval_accuracy": 0.9557522123893806,
28
- "eval_loss": 0.22898824512958527,
29
- "eval_runtime": 0.4057,
30
- "eval_samples_per_second": 557.077,
31
- "eval_steps_per_second": 71.483,
32
- "step": 510
33
- }
34
- ],
35
- "max_steps": 1275,
36
- "num_train_epochs": 5,
37
- "total_flos": 54781938243468.0,
38
- "trial_name": null,
39
- "trial_params": null
40
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-510/training_args.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:ee1178219233a39de3467c1d1c9ad2fd1d976e51b9ed6bb5a459131607445eaf
3
- size 2735
 
 
 
 
checkpoint-510/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-765/config.json DELETED
@@ -1,39 +0,0 @@
1
- {
2
- "_name_or_path": "distilroberta-base",
3
- "architectures": [
4
- "RobertaForSequenceClassification"
5
- ],
6
- "attention_probs_dropout_prob": 0.1,
7
- "bos_token_id": 0,
8
- "classifier_dropout": null,
9
- "eos_token_id": 2,
10
- "gradient_checkpointing": false,
11
- "hidden_act": "gelu",
12
- "hidden_dropout_prob": 0.1,
13
- "hidden_size": 768,
14
- "id2label": {
15
- "0": "LABEL_0",
16
- "1": "LABEL_1",
17
- "2": "LABEL_2"
18
- },
19
- "initializer_range": 0.02,
20
- "intermediate_size": 3072,
21
- "label2id": {
22
- "LABEL_0": 0,
23
- "LABEL_1": 1,
24
- "LABEL_2": 2
25
- },
26
- "layer_norm_eps": 1e-05,
27
- "max_position_embeddings": 514,
28
- "model_type": "roberta",
29
- "num_attention_heads": 12,
30
- "num_hidden_layers": 6,
31
- "pad_token_id": 1,
32
- "position_embedding_type": "absolute",
33
- "problem_type": "single_label_classification",
34
- "torch_dtype": "float32",
35
- "transformers_version": "4.10.2",
36
- "type_vocab_size": 1,
37
- "use_cache": true,
38
- "vocab_size": 50265
39
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
checkpoint-765/merges.txt DELETED
The diff for this file is too large to render. See raw diff