verydwis committed
Commit 9c29ba7
Parent: 2ba13d1

End of training

Files changed (33)
  1. README.md +8 -10
  2. config.json +180 -20
  3. generation_config.json +5 -0
  4. model.safetensors +2 -2
  5. runs/Dec02_03-55-44_6ce18943f435/events.out.tfevents.1701489350.6ce18943f435.397.0 +3 -0
  6. runs/Dec02_03-57-20_6ce18943f435/events.out.tfevents.1701489445.6ce18943f435.397.1 +3 -0
  7. runs/Dec02_03-58-04_6ce18943f435/events.out.tfevents.1701489487.6ce18943f435.397.2 +3 -0
  8. runs/Dec02_04-00-02_6ce18943f435/events.out.tfevents.1701489613.6ce18943f435.397.3 +3 -0
  9. runs/Dec02_04-01-18_6ce18943f435/events.out.tfevents.1701489687.6ce18943f435.397.4 +3 -0
  10. runs/Dec02_04-01-57_6ce18943f435/events.out.tfevents.1701489720.6ce18943f435.397.5 +3 -0
  11. runs/Dec02_04-03-59_6ce18943f435/events.out.tfevents.1701489843.6ce18943f435.397.6 +3 -0
  12. runs/Dec02_04-04-25_6ce18943f435/events.out.tfevents.1701489871.6ce18943f435.397.7 +3 -0
  13. runs/Dec02_04-06-12_6ce18943f435/events.out.tfevents.1701489977.6ce18943f435.397.8 +3 -0
  14. runs/Dec02_04-06-44_6ce18943f435/events.out.tfevents.1701490009.6ce18943f435.397.9 +3 -0
  15. runs/Dec02_04-07-22_6ce18943f435/events.out.tfevents.1701490046.6ce18943f435.397.10 +3 -0
  16. runs/Dec02_04-10-10_6ce18943f435/events.out.tfevents.1701490216.6ce18943f435.397.11 +3 -0
  17. runs/Dec02_04-12-10_6ce18943f435/events.out.tfevents.1701490336.6ce18943f435.397.12 +3 -0
  18. runs/Dec02_04-13-05_6ce18943f435/events.out.tfevents.1701490393.6ce18943f435.397.13 +3 -0
  19. runs/Dec02_04-13-05_6ce18943f435/events.out.tfevents.1701490416.6ce18943f435.397.14 +3 -0
  20. runs/Dec02_04-16-34_6ce18943f435/events.out.tfevents.1701490600.6ce18943f435.397.15 +3 -0
  21. runs/Dec02_04-17-18_6ce18943f435/events.out.tfevents.1701490642.6ce18943f435.397.16 +3 -0
  22. runs/Dec02_04-17-52_6ce18943f435/events.out.tfevents.1701490682.6ce18943f435.397.17 +3 -0
  23. runs/Dec02_04-18-49_6ce18943f435/events.out.tfevents.1701490733.6ce18943f435.397.18 +3 -0
  24. runs/Dec02_04-19-03_6ce18943f435/events.out.tfevents.1701490747.6ce18943f435.397.19 +3 -0
  25. runs/Dec02_04-19-35_6ce18943f435/events.out.tfevents.1701490780.6ce18943f435.397.20 +3 -0
  26. runs/Dec02_04-19-35_6ce18943f435/events.out.tfevents.1701490811.6ce18943f435.397.21 +3 -0
  27. runs/Dec02_05-18-10_6ce18943f435/events.out.tfevents.1701494296.6ce18943f435.12064.0 +3 -0
  28. runs/Dec02_11-29-38_6ce18943f435/events.out.tfevents.1701516584.6ce18943f435.12064.9 +3 -0
  29. runs/Dec02_11-31-57_6ce18943f435/events.out.tfevents.1701516722.6ce18943f435.12064.10 +3 -0
  30. runs/Dec02_11-34-07_6ce18943f435/events.out.tfevents.1701516852.6ce18943f435.12064.11 +3 -0
  31. special_tokens_map.json +0 -2
  32. tokenizer_config.json +0 -2
  33. training_args.bin +2 -2
README.md CHANGED
@@ -1,6 +1,4 @@
  ---
- license: mit
- base_model: cahya/bert-base-indonesian-1.5G
  tags:
  - generated_from_trainer
  metrics:
@@ -15,13 +13,13 @@ should probably proofread and complete it, then remove this comment. -->

  # liputan_6_model

- This model is a fine-tuned version of [cahya/bert-base-indonesian-1.5G](https://huggingface.co/cahya/bert-base-indonesian-1.5G) on the None dataset.
+ This model is a fine-tuned version of [](https://huggingface.co/) on the None dataset.
  It achieves the following results on the evaluation set:
- - Loss: 4.3791
- - Rouge1: 30.8191
- - Rouge2: 15.7622
- - Rougel: 25.4728
- - Rougelsum: 28.2613
+ - Loss: 0.7021
+ - Rouge1: 20.9792
+ - Rouge2: 11.4128
+ - Rougel: 20.6501
+ - Rougelsum: 20.6522

  ## Model description

@@ -40,7 +38,7 @@ More information needed
  ### Training hyperparameters

  The following hyperparameters were used during training:
- - learning_rate: 5e-05
+ - learning_rate: 2e-05
  - train_batch_size: 4
  - eval_batch_size: 4
  - seed: 42
@@ -53,7 +51,7 @@ The following hyperparameters were used during training:

  | Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
  |:-------------:|:-----:|:----:|:---------------:|:-------:|:-------:|:-------:|:---------:|
- | 3.4606 | 0.0 | 16 | 4.3791 | 30.8191 | 15.7622 | 25.4728 | 28.2613 |
+ | 0.332 | 0.0 | 10 | 0.7021 | 20.9792 | 11.4128 | 20.6501 | 20.6522 |


  ### Framework versions
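The card change amounts to new evaluation numbers and a lower learning rate (2e-05, previously 5e-05); the batch sizes and seed are unchanged. As a hedged sketch rather than the author's actual training script, these values map onto the usual `Seq2SeqTrainingArguments`; `output_dir` below is a placeholder taken from the card title, and `predict_with_generate` is not listed in the card but is commonly enabled so that eval ROUGE is computed from generated summaries.

```python
from transformers import Seq2SeqTrainingArguments

# Hedged sketch only: hyperparameters copied from the updated model card above.
training_args = Seq2SeqTrainingArguments(
    output_dir="liputan_6_model",    # placeholder, taken from the card title
    learning_rate=2e-05,             # updated in this commit (was 5e-05)
    per_device_train_batch_size=4,   # train_batch_size: 4
    per_device_eval_batch_size=4,    # eval_batch_size: 4
    seed=42,                         # seed: 42
    predict_with_generate=True,      # assumption: needed to compute ROUGE from generated text
)
```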
config.json CHANGED
@@ -1,26 +1,186 @@
  {
- "_name_or_path": "cahya/bert-base-indonesian-1.5G",
  "architectures": [
- "BertForMaskedLM"
+ "EncoderDecoderModel"
  ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "gradient_checkpointing": false,
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 768,
- "initializer_range": 0.02,
- "intermediate_size": 3072,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
+ "decoder": {
+ "_name_or_path": "cahya/gpt2-small-indonesian-522M",
+ "activation_function": "gelu_new",
+ "add_cross_attention": true,
+ "architectures": [
+ "GPT2LMHeadModel"
+ ],
+ "attn_pdrop": 0.1,
+ "bad_words_ids": null,
+ "begin_suppress_tokens": null,
+ "bos_token_id": 50256,
+ "chunk_size_feed_forward": 0,
+ "cross_attention_hidden_size": null,
+ "decoder_start_token_id": null,
+ "diversity_penalty": 0.0,
+ "do_sample": false,
+ "early_stopping": false,
+ "embd_pdrop": 0.1,
+ "encoder_no_repeat_ngram_size": 0,
+ "eos_token_id": 50256,
+ "exponential_decay_length_penalty": null,
+ "finetuning_task": null,
+ "forced_bos_token_id": null,
+ "forced_eos_token_id": null,
+ "id2label": {
+ "0": "LABEL_0",
+ "1": "LABEL_1"
+ },
+ "initializer_range": 0.02,
+ "is_decoder": true,
+ "is_encoder_decoder": false,
+ "label2id": {
+ "LABEL_0": 0,
+ "LABEL_1": 1
+ },
+ "layer_norm_epsilon": 1e-05,
+ "length_penalty": 1.0,
+ "max_length": 20,
+ "min_length": 0,
+ "model_type": "gpt2",
+ "n_ctx": 1024,
+ "n_embd": 768,
+ "n_head": 12,
+ "n_inner": null,
+ "n_layer": 12,
+ "n_positions": 1024,
+ "no_repeat_ngram_size": 0,
+ "num_beam_groups": 1,
+ "num_beams": 1,
+ "num_return_sequences": 1,
+ "output_attentions": false,
+ "output_hidden_states": false,
+ "output_scores": false,
+ "pad_token_id": null,
+ "prefix": null,
+ "problem_type": null,
+ "pruned_heads": {},
+ "remove_invalid_values": false,
+ "reorder_and_upcast_attn": false,
+ "repetition_penalty": 1.0,
+ "resid_pdrop": 0.1,
+ "return_dict": true,
+ "return_dict_in_generate": false,
+ "scale_attn_by_inverse_layer_idx": false,
+ "scale_attn_weights": true,
+ "sep_token_id": null,
+ "summary_activation": null,
+ "summary_first_dropout": 0.1,
+ "summary_proj_to_labels": true,
+ "summary_type": "cls_index",
+ "summary_use_proj": true,
+ "suppress_tokens": null,
+ "task_specific_params": null,
+ "temperature": 1.0,
+ "tf_legacy_loss": false,
+ "tie_encoder_decoder": false,
+ "tie_word_embeddings": true,
+ "tokenizer_class": null,
+ "top_k": 50,
+ "top_p": 1.0,
+ "torch_dtype": null,
+ "torchscript": false,
+ "typical_p": 1.0,
+ "use_bfloat16": false,
+ "use_cache": true,
+ "vocab_size": 50257
+ },
+ "decoder_start_token_id": 0,
+ "early_stopping": true,
+ "encoder": {
+ "_name_or_path": "cahya/bert-base-indonesian-1.5G",
+ "add_cross_attention": false,
+ "architectures": [
+ "BertForMaskedLM"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bad_words_ids": null,
+ "begin_suppress_tokens": null,
+ "bos_token_id": null,
+ "chunk_size_feed_forward": 0,
+ "classifier_dropout": null,
+ "cross_attention_hidden_size": null,
+ "decoder_start_token_id": null,
+ "diversity_penalty": 0.0,
+ "do_sample": false,
+ "early_stopping": false,
+ "encoder_no_repeat_ngram_size": 0,
+ "eos_token_id": null,
+ "exponential_decay_length_penalty": null,
+ "finetuning_task": null,
+ "forced_bos_token_id": null,
+ "forced_eos_token_id": null,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "LABEL_0",
+ "1": "LABEL_1"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "is_decoder": false,
+ "is_encoder_decoder": false,
+ "label2id": {
+ "LABEL_0": 0,
+ "LABEL_1": 1
+ },
+ "layer_norm_eps": 1e-12,
+ "length_penalty": 1.0,
+ "max_length": 20,
+ "max_position_embeddings": 512,
+ "min_length": 0,
+ "model_type": "bert",
+ "no_repeat_ngram_size": 0,
+ "num_attention_heads": 12,
+ "num_beam_groups": 1,
+ "num_beams": 1,
+ "num_hidden_layers": 12,
+ "num_return_sequences": 1,
+ "output_attentions": false,
+ "output_hidden_states": false,
+ "output_scores": false,
+ "pad_token_id": 0,
+ "position_embedding_type": "absolute",
+ "prefix": null,
+ "problem_type": null,
+ "pruned_heads": {},
+ "remove_invalid_values": false,
+ "repetition_penalty": 1.0,
+ "return_dict": true,
+ "return_dict_in_generate": false,
+ "sep_token_id": null,
+ "suppress_tokens": null,
+ "task_specific_params": null,
+ "temperature": 1.0,
+ "tf_legacy_loss": false,
+ "tie_encoder_decoder": false,
+ "tie_word_embeddings": true,
+ "tokenizer_class": null,
+ "top_k": 50,
+ "top_p": 1.0,
+ "torch_dtype": null,
+ "torchscript": false,
+ "type_vocab_size": 2,
+ "typical_p": 1.0,
+ "use_bfloat16": false,
+ "use_cache": true,
+ "vocab_size": 32000
+ },
+ "eos_token_id": 0,
+ "is_encoder_decoder": true,
+ "lenght_penalty": 2.0,
+ "max_lenght": 40,
+ "min_lenght": 20,
+ "model_type": "encoder-decoder",
+ "no_repeat_gram_size": 3,
+ "num_beams": 10,
  "pad_token_id": 0,
- "position_embedding_type": "absolute",
  "torch_dtype": "float32",
- "transformers_version": "4.35.2",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 32000
+ "transformers_version": "4.35.2"
  }
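The replacement config describes a warm-started encoder-decoder: the `encoder` block carries the cahya/bert-base-indonesian-1.5G BERT settings and the `decoder` block a cahya/gpt2-small-indonesian-522M GPT-2 with cross-attention enabled. Below is a minimal sketch of how such a config is typically produced with the transformers `EncoderDecoderModel` API; it is an illustration, not the author's code.

```python
from transformers import BertTokenizer, EncoderDecoderModel

# Illustrative sketch; checkpoint names come from the "encoder" and "decoder" blocks above.
model = EncoderDecoderModel.from_encoder_decoder_pretrained(
    "cahya/bert-base-indonesian-1.5G",    # encoder: BERT, vocab_size 32000
    "cahya/gpt2-small-indonesian-522M",   # decoder: GPT-2 with cross-attention added
)
tokenizer = BertTokenizer.from_pretrained("cahya/bert-base-indonesian-1.5G")

# Mirror the top-level token ids and beam-search settings of the committed config.
model.config.decoder_start_token_id = 0
model.config.eos_token_id = 0
model.config.pad_token_id = 0
model.config.num_beams = 10
model.config.early_stopping = True
```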
generation_config.json CHANGED
@@ -1,4 +1,9 @@
  {
+ "bos_token_id": 50256,
+ "decoder_start_token_id": 0,
+ "early_stopping": true,
+ "eos_token_id": 0,
+ "num_beams": 10,
  "pad_token_id": 0,
  "transformers_version": "4.35.2"
  }
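Together with the config above, these defaults mean summaries are decoded with 10-beam search and early stopping. A hedged usage sketch follows; the repo id `verydwis/liputan_6_model` is inferred from the committer name and card title and may not be exact.

```python
import torch
from transformers import BertTokenizer, EncoderDecoderModel

repo_id = "verydwis/liputan_6_model"  # assumption; adjust to the actual repo id
tokenizer = BertTokenizer.from_pretrained(repo_id)
model = EncoderDecoderModel.from_pretrained(repo_id)

article = "..."  # an Indonesian news article (Liputan6-style input)
inputs = tokenizer(article, return_tensors="pt", truncation=True, max_length=512)

with torch.no_grad():
    summary_ids = model.generate(
        inputs.input_ids,
        attention_mask=inputs.attention_mask,
        num_beams=10,         # from generation_config.json
        early_stopping=True,  # from generation_config.json
    )
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```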
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:35b687ac5617da86f81e29875aabf06782ebc501dabbc72c2e016bdbca77cdb8
- size 442627224
+ oid sha256:d4fa7417ab466b26f1c2090a25a95a7b15227d57c2a07949539d553f58b963e2
+ size 1053747768
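The body above is a Git LFS pointer rather than the weights themselves: the repository records only the SHA-256 and size (roughly 443 MB before, 1.05 GB after, consistent with the added GPT-2 decoder), and the Hub resolves it to the real file on download. A small sketch, again assuming the repo id `verydwis/liputan_6_model`:

```python
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

# Resolve the LFS pointer to the actual safetensors file and inspect its contents.
path = hf_hub_download(repo_id="verydwis/liputan_6_model", filename="model.safetensors")
state_dict = load_file(path)  # mapping of parameter name -> tensor
print(f"{len(state_dict)} tensors")
```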
runs/Dec02_03-55-44_6ce18943f435/events.out.tfevents.1701489350.6ce18943f435.397.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75fb4fd07209d0d8f23b0036ce7c2e12d73a340befa03e046cea1f5a47395334
+ size 11160
runs/Dec02_03-57-20_6ce18943f435/events.out.tfevents.1701489445.6ce18943f435.397.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d46cca2581e44722202767ca4620f8565e6e35a38e967210a55f30079a6a37c
+ size 10473
runs/Dec02_03-58-04_6ce18943f435/events.out.tfevents.1701489487.6ce18943f435.397.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:82740507b58e2e8b0adc7c2d28f36e9d83f7f6ac6b0ca45f5e89e72eb95673fd
+ size 10473
runs/Dec02_04-00-02_6ce18943f435/events.out.tfevents.1701489613.6ce18943f435.397.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ffeb31def991ff8d0ab2b0e38b7b6686a7f5f2dd35624263151b1abbb26d0c5d
+ size 8733
runs/Dec02_04-01-18_6ce18943f435/events.out.tfevents.1701489687.6ce18943f435.397.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3bfa166d5eeeeaf6ce673990005883b3c6b5ef87645578fb5d97c3abc5e30ac7
+ size 10473
runs/Dec02_04-01-57_6ce18943f435/events.out.tfevents.1701489720.6ce18943f435.397.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96b30d83d34b14e90708e041ae1d3a15b397634168dab58069d89d90d54a06a6
+ size 8734
runs/Dec02_04-03-59_6ce18943f435/events.out.tfevents.1701489843.6ce18943f435.397.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b2eae56e2f60f3af4f604057d93b6c948eb83bcf392d490365e03dcbc68c3e2
+ size 8734
runs/Dec02_04-04-25_6ce18943f435/events.out.tfevents.1701489871.6ce18943f435.397.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fdf1f19d149aa24b9c7429ba028644360a0a645b6074cf5b2972d1a2594037be
+ size 8734
runs/Dec02_04-06-12_6ce18943f435/events.out.tfevents.1701489977.6ce18943f435.397.8 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65d75128caa0a8999b86df49415ca918015138254d192897a5043481c089d6ed
+ size 8734
runs/Dec02_04-06-44_6ce18943f435/events.out.tfevents.1701490009.6ce18943f435.397.9 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5666bd743f0e79d8015d07928d72bbe1e8fb97287c2b2bff192c9e9895e2a404
+ size 8734
runs/Dec02_04-07-22_6ce18943f435/events.out.tfevents.1701490046.6ce18943f435.397.10 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b687035dde87dce74f8840a291a312f5b352a82e0846134278f46f28aee70a2e
+ size 8734
runs/Dec02_04-10-10_6ce18943f435/events.out.tfevents.1701490216.6ce18943f435.397.11 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df21b93b794618ea0be2c4ae5e1a5e8b2bc6bd5b860307de89f3fe33bc0629d8
+ size 8733
runs/Dec02_04-12-10_6ce18943f435/events.out.tfevents.1701490336.6ce18943f435.397.12 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92d6ad455aadaa90344c6d1a9269a93f3e86e86827e3d4531bcb64e388194f21
+ size 8733
runs/Dec02_04-13-05_6ce18943f435/events.out.tfevents.1701490393.6ce18943f435.397.13 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:126c7015b839f5cba43f6b5c9b565a82c015df4a4a48e35576b58a5fce887388
+ size 8733
runs/Dec02_04-13-05_6ce18943f435/events.out.tfevents.1701490416.6ce18943f435.397.14 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:775fe06970bc04bdeebf5ef0d1d76f6b0b4cd095a73f792d24a4300a9dfc6c79
+ size 11741
runs/Dec02_04-16-34_6ce18943f435/events.out.tfevents.1701490600.6ce18943f435.397.15 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8fba96a059760f63b6505d4c7c8df8b7495dfc805702ab56faa9ae1b84134447
+ size 10075
runs/Dec02_04-17-18_6ce18943f435/events.out.tfevents.1701490642.6ce18943f435.397.16 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4bd9be1a9e57d2f61ebfefef22df5601d1e917a3319834488c5d3a76d72ad0ea
+ size 9531
runs/Dec02_04-17-52_6ce18943f435/events.out.tfevents.1701490682.6ce18943f435.397.17 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f57e1e62092e6b88d7845af0bdcd827dbd89aeb11bc71ccf3d47f71cbe343cc
+ size 9532
runs/Dec02_04-18-49_6ce18943f435/events.out.tfevents.1701490733.6ce18943f435.397.18 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0104a1aa326fa3038766506987e23e5ba69ec97b012aeabe59c51c57fbd52cc9
+ size 8734
runs/Dec02_04-19-03_6ce18943f435/events.out.tfevents.1701490747.6ce18943f435.397.19 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67e9d3eb62ca1342037547448700ae75bc567e779d654f48ddec770559e8a7b0
+ size 10076
runs/Dec02_04-19-35_6ce18943f435/events.out.tfevents.1701490780.6ce18943f435.397.20 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8bf211bf8d0e8fadefefb34d648f3d7a8c879f991a8954dc3943c75e6209c46
+ size 11184
runs/Dec02_04-19-35_6ce18943f435/events.out.tfevents.1701490811.6ce18943f435.397.21 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b337c5873d58171b18d2ed3ff797b6d0c0b02c5fc43938e4be3db0e2f385635e
+ size 16878
runs/Dec02_05-18-10_6ce18943f435/events.out.tfevents.1701494296.6ce18943f435.12064.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5da370c9cfcb3f81ca8933ab7747e9de4f7cf6700378b17f022dc44bf6145fed
+ size 268023
runs/Dec02_11-29-38_6ce18943f435/events.out.tfevents.1701516584.6ce18943f435.12064.9 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f4a36e5dbcab6da940b548589b66458804121cb9397954c11175f37b1715d48
+ size 9193
runs/Dec02_11-31-57_6ce18943f435/events.out.tfevents.1701516722.6ce18943f435.12064.10 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ac40f63d2f9333a932f7c2264ec56129869b9f40817d92ff8edf031d04d4f10
+ size 9656
runs/Dec02_11-34-07_6ce18943f435/events.out.tfevents.1701516852.6ce18943f435.12064.11 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:acfe5178a5a8404ab6d6ff286c8a323db096e0ecd485eee0fad2b2c14e064dd8
+ size 9697
special_tokens_map.json CHANGED
@@ -1,7 +1,5 @@
  {
- "bos_token": "[CLS]",
  "cls_token": "[CLS]",
- "eos_token": "[SEP]",
  "mask_token": "[MASK]",
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
tokenizer_config.json CHANGED
@@ -41,12 +41,10 @@
  "special": true
  }
  },
- "bos_token": "[CLS]",
  "clean_up_tokenization_spaces": true,
  "cls_token": "[CLS]",
  "do_basic_tokenize": true,
  "do_lower_case": true,
- "eos_token": "[SEP]",
  "full_tokenizer_file": null,
  "mask_token": "[MASK]",
  "model_max_length": 1000000000000000019884624838656,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1d34d57ff16c62aa94f4ecdac559a0f7307ec0e2ce3862c0dce742970f3cf813
- size 4920
+ oid sha256:c65e8690cb80378a21222d5e5f21aa3c14889ea95a1a5fc196bdb74f61bf79ae
+ size 4728