noahkim committed
Commit 3a14299
Parent: 6801730

Training in progress, step 500

config.json CHANGED
@@ -1,6 +1,6 @@
 {
-  "_commit_hash": null,
-  "_name_or_path": "/content/drive/MyDrive/project/kobigbird/new_model",
+  "_commit_hash": "68017302937fe01c24c12a1e2a991e7730d55254",
+  "_name_or_path": "noahkim/KoBigBird-KoBart-News-Summarization",
   "architectures": [
     "EncoderDecoderModel"
   ],
@@ -16,6 +16,7 @@
   ],
   "attention_dropout": 0.0,
   "bad_words_ids": null,
+  "begin_suppress_tokens": null,
   "bos_token_id": 0,
   "chunk_size_feed_forward": 0,
   "classif_dropout": 0.1,
@@ -82,6 +83,7 @@
   "scale_embedding": false,
   "sep_token_id": null,
   "static_position_embeddings": false,
+  "suppress_tokens": null,
   "task_specific_params": {
     "summarization": {
       "length_penalty": 1.0,
@@ -99,7 +101,7 @@
   "top_p": 1.0,
   "torch_dtype": null,
   "torchscript": false,
-  "transformers_version": "4.22.0",
+  "transformers_version": "4.24.0",
   "typical_p": 1.0,
   "use_bfloat16": false,
   "use_cache": true,
@@ -115,6 +117,7 @@
   "attention_probs_dropout_prob": 0.1,
   "attention_type": "block_sparse",
   "bad_words_ids": null,
+  "begin_suppress_tokens": null,
   "block_size": 64,
   "bos_token_id": 5,
   "chunk_size_feed_forward": 0,
@@ -174,6 +177,7 @@
   "return_dict": true,
   "return_dict_in_generate": false,
   "sep_token_id": 3,
+  "suppress_tokens": null,
   "task_specific_params": null,
   "temperature": 1.0,
   "tf_legacy_loss": false,
@@ -184,7 +188,7 @@
   "top_p": 1.0,
   "torch_dtype": "float32",
   "torchscript": false,
-  "transformers_version": "4.22.0",
+  "transformers_version": "4.24.0",
   "type_vocab_size": 2,
   "typical_p": 1.0,
   "use_bfloat16": false,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f49605c04043cbd7105b7a7883eff3c08f470e47ff10337b3b53e566046432f7
+oid sha256:997433bccf3482849a26d95d9e631386fbf14134f44900c2b5ebe72640a255a8
 size 599515441
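
pytorch_model.bin is stored with Git LFS, so the change above only swaps the pointer file: a new sha256 oid for the step-500 weights, with the size unchanged at 599515441 bytes. A rough sketch for checking a downloaded copy against that oid (assuming the huggingface_hub client; repo id and filename are taken from this page):

import hashlib
from huggingface_hub import hf_hub_download

# Fetch the weights file from the Hub (cached locally after the first call).
path = hf_hub_download(
    repo_id="noahkim/KoBigBird-KoBart-News-Summarization",
    filename="pytorch_model.bin",
)

# The digest should equal the "oid sha256:..." line of the committed LFS pointer.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
print(digest.hexdigest())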
runs/Nov10_00-25-23_37464bceb0fa/1668040157.0621037/events.out.tfevents.1668040157.37464bceb0fa.107.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:163dc0f2918ebcdce1430846b6cf5629085f57d056f90b112ad52aa08196739b
+size 5773
runs/Nov10_00-25-23_37464bceb0fa/events.out.tfevents.1668040157.37464bceb0fa.107.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d0952d8c8ee56e5a9e2cdbc66d858e69684eec61ac3d857a6ea208f1654e7466
+size 9053
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6f2079f94fe625159615946294d7d128775a2ce17a9e800c54f8dfce42de5084
-size 1049772
+oid sha256:bbc9caa9787b275ca179d2cd9dd93ec63872166989282deaeb9c5fef76862312
+size 1049605
tokenizer_config.json CHANGED
@@ -6,7 +6,7 @@
   "eos_token": "</s>",
   "errors": "replace",
   "mask_token": "<mask>",
-  "name_or_path": "/content/drive/MyDrive/project/kobigbird/new_model",
+  "name_or_path": "noahkim/KoBigBird-KoBart-News-Summarization",
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "special_tokens_map_file": null,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:26b23c3b7508e5882b1f4468f6af3db382e819eaa0948ca9e65c25dd54a45d34
+oid sha256:72f3088c78cbe1b555c8d747542c2681278de56a53811dab1e9344b8e8c52967
 size 3567