noahkim committed
Commit 6f2b714
1 Parent(s): 73b29ae

Training in progress, step 500

.gitattributes CHANGED
@@ -30,3 +30,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.json CHANGED
@@ -1,98 +1,111 @@
 {
   "_commit_hash": null,
-  "_name_or_path": "/content/drive/MyDrive/project/kobigbird/first_model",
-  "add_cross_attention": true,
+  "_name_or_path": "/content/drive/MyDrive/project/kobigbird/new_model",
   "architectures": [
     "EncoderDecoderModel"
   ],
-  "attention_type": "original_full",
   "decoder": {
-    "_name_or_path": "monologg/kobigbird-bert-base",
+    "_name_or_path": "ainize/kobart-news",
+    "activation_dropout": 0.0,
+    "activation_function": "gelu",
+    "add_bias_logits": false,
     "add_cross_attention": true,
+    "add_final_layer_norm": false,
     "architectures": [
-      "BigBirdForMaskedLM"
+      "BartForConditionalGeneration"
     ],
-    "attention_probs_dropout_prob": 0.1,
-    "attention_type": "block_sparse",
+    "attention_dropout": 0.0,
     "bad_words_ids": null,
-    "block_size": 64,
-    "bos_token_id": 5,
+    "bos_token_id": 0,
     "chunk_size_feed_forward": 0,
-    "classifier_dropout": null,
+    "classif_dropout": 0.1,
+    "classifier_dropout": 0.1,
     "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
+    "d_model": 768,
+    "decoder_attention_heads": 6,
+    "decoder_ffn_dim": 3072,
+    "decoder_layerdrop": 0.0,
+    "decoder_layers": 6,
+    "decoder_start_token_id": 0,
     "diversity_penalty": 0.0,
+    "do_blenderbot_90_layernorm": false,
     "do_sample": false,
+    "dropout": 0.1,
     "early_stopping": false,
+    "encoder_attention_heads": 16,
+    "encoder_ffn_dim": 3072,
+    "encoder_layerdrop": 0.0,
+    "encoder_layers": 6,
     "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": 6,
+    "eos_token_id": 1,
     "exponential_decay_length_penalty": null,
+    "extra_pos_embeddings": 2,
     "finetuning_task": null,
+    "force_bos_token_to_be_generated": false,
     "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
+    "forced_eos_token_id": 2,
     "gradient_checkpointing": false,
-    "hidden_act": "gelu_new",
-    "hidden_dropout_prob": 0.1,
-    "hidden_size": 768,
     "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
+      "0": "NEGATIVE",
+      "1": "POSITIVE"
     },
-    "initializer_range": 0.02,
-    "intermediate_size": 3072,
+    "init_std": 0.02,
     "is_decoder": true,
-    "is_encoder_decoder": false,
+    "is_encoder_decoder": true,
     "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
+      "NEGATIVE": 0,
+      "POSITIVE": 1
     },
-    "layer_norm_eps": 1e-12,
     "length_penalty": 1.0,
     "max_length": 20,
-    "max_position_embeddings": 4096,
+    "max_position_embeddings": 1026,
     "min_length": 0,
-    "model_type": "big_bird",
+    "model_type": "bart",
    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 6,
+    "normalize_before": false,
+    "normalize_embedding": true,
     "num_beam_groups": 1,
     "num_beams": 1,
     "num_hidden_layers": 6,
-    "num_random_blocks": 3,
     "num_return_sequences": 1,
     "output_attentions": false,
     "output_hidden_states": false,
     "output_scores": false,
-    "pad_token_id": 0,
-    "position_embedding_type": "absolute",
+    "pad_token_id": 3,
     "prefix": null,
     "problem_type": null,
     "pruned_heads": {},
     "remove_invalid_values": false,
     "repetition_penalty": 1.0,
-    "rescale_embeddings": false,
     "return_dict": true,
     "return_dict_in_generate": false,
-    "sep_token_id": 3,
-    "task_specific_params": null,
+    "scale_embedding": false,
+    "sep_token_id": null,
+    "static_position_embeddings": false,
+    "task_specific_params": {
+      "summarization": {
+        "length_penalty": 1.0,
+        "max_length": 128,
+        "min_length": 12,
+        "num_beams": 4
+      }
+    },
     "temperature": 1.0,
     "tf_legacy_loss": false,
     "tie_encoder_decoder": false,
     "tie_word_embeddings": true,
-    "tokenizer_class": "BertTokenizer",
+    "tokenizer_class": null,
     "top_k": 50,
     "top_p": 1.0,
-    "torch_dtype": "float32",
+    "torch_dtype": null,
     "torchscript": false,
     "transformers_version": "4.22.0",
-    "type_vocab_size": 2,
     "typical_p": 1.0,
     "use_bfloat16": false,
-    "use_bias": true,
     "use_cache": true,
-    "vocab_size": 32500
+    "vocab_size": 30000
   },
-  "decoder_layers": 6,
-  "decoder_start_token_id": 2,
+  "decoder_start_token_id": 0,
   "encoder": {
     "_name_or_path": "monologg/kobigbird-bert-base",
     "add_cross_attention": false,
@@ -107,10 +120,11 @@
     "chunk_size_feed_forward": 0,
     "classifier_dropout": null,
     "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
+    "decoder_start_token_id": 0,
     "diversity_penalty": 0.0,
     "do_sample": false,
     "early_stopping": false,
+    "encoder_layers": 6,
     "encoder_no_repeat_ngram_size": 0,
     "eos_token_id": 6,
     "exponential_decay_length_penalty": null,
@@ -149,7 +163,7 @@
     "output_attentions": false,
     "output_hidden_states": false,
     "output_scores": false,
-    "pad_token_id": 0,
+    "pad_token_id": 3,
     "position_embedding_type": "absolute",
     "prefix": null,
     "problem_type": null,
@@ -176,16 +190,11 @@
     "use_bfloat16": false,
     "use_bias": true,
     "use_cache": true,
-    "vocab_size": 32500
+    "vocab_size": 30000
   },
-  "encoder_layers": 6,
-  "is_decoder": true,
   "is_encoder_decoder": true,
   "model_type": "encoder-decoder",
-  "num_attention_heads": 6,
-  "num_hidden_layers": 6,
-  "pad_token_id": 0,
+  "pad_token_id": 3,
   "torch_dtype": "float32",
-  "transformers_version": null,
-  "vocab_size": 32500
+  "transformers_version": null
 }
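The config change above swaps the decoder of the EncoderDecoderModel from a second KoBigBird (BigBirdForMaskedLM) to the BART-based ainize/kobart-news checkpoint, and updates decoder_start_token_id, pad_token_id and vocab_size to match. A minimal sketch of how such a config could be produced, assuming the standard transformers API (this is not the author's actual training script; model IDs, token ids and the save path are taken from the diff, everything else is assumed):

# Sketch only: pair a KoBigBird encoder with a KoBART decoder.
from transformers import EncoderDecoderModel

encoder_id = "monologg/kobigbird-bert-base"  # encoder checkpoint from the diff
decoder_id = "ainize/kobart-news"            # new decoder checkpoint from the diff

# Cross-attention is added to the decoder automatically.
model = EncoderDecoderModel.from_encoder_decoder_pretrained(encoder_id, decoder_id)

# Special-token ids as they appear in the updated config.json.
model.config.decoder_start_token_id = 0
model.config.pad_token_id = 3

model.save_pretrained("/content/drive/MyDrive/project/kobigbird/new_model")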
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c9a3be97724979cecd41e5f5de6ace62863f02570fccefdf72c99550a857ca9e
-size 629203807
+oid sha256:3c565a530a893b515d527b715d8d63dc9ec85130bb90d68b68e01d1cf9242a27
+size 599515441
runs/Sep16_10-14-08_648715bc8f44/1663323508.8070502/events.out.tfevents.1663323508.648715bc8f44.78.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d3b874e8b2dcd9fc3c589910da851ac3b00e154c2215b226b31f683451b3586c
+size 5801
runs/Sep16_10-14-08_648715bc8f44/events.out.tfevents.1663323508.648715bc8f44.78.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:21f84d897244e519c5fa565b558e3a3ad25c9e201410aa90753fd6f861113324
+size 8232
runs/Sep16_10-19-52_648715bc8f44/1663323646.7311208/events.out.tfevents.1663323646.648715bc8f44.662.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5bd8378f3303d8999b1b402936b797a420ebf96d3cf19a2476f1d520904f408d
+size 5801
runs/Sep16_10-19-52_648715bc8f44/events.out.tfevents.1663323646.648715bc8f44.662.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6e6943d852352a7ef29dc60c3d70f030169b713479d0da466376a76787ce143
+size 8729
runs/Sep16_10-25-07_648715bc8f44/1663323918.1056142/events.out.tfevents.1663323918.648715bc8f44.897.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97517e4f6866cb29ff2772388a7b8597a11eb21ba2aa2669b74ad1dfff49357b
+size 5801
runs/Sep16_10-25-07_648715bc8f44/events.out.tfevents.1663323918.648715bc8f44.897.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b3c37fe0dc1caf776d07b0cc14788792513d7dbda48419cb876ff6c632c2f6cc
+size 8886
special_tokens_map.json CHANGED
@@ -1,9 +1,15 @@
 {
   "bos_token": "<s>",
-  "cls_token": "[CLS]",
+  "cls_token": "<s>",
   "eos_token": "</s>",
-  "mask_token": "[MASK]",
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "unk_token": "[UNK]"
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
 }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,18 +1,15 @@
 {
+  "add_prefix_space": false,
   "bos_token": "<s>",
-  "cls_token": "[CLS]",
-  "do_basic_tokenize": true,
+  "cls_token": "<s>",
   "do_lower_case": false,
   "eos_token": "</s>",
-  "mask_token": "[MASK]",
-  "model_max_length": 4096,
-  "name_or_path": "/content/drive/MyDrive/project/kobigbird/first_model",
-  "never_split": null,
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "special_tokens_map_file": "/root/.cache/huggingface/hub/models--monologg--kobigbird-bert-base/snapshots/ceacda477e20abef2c929adfa4a07c6f811323be/special_tokens_map.json",
-  "strip_accents": null,
-  "tokenize_chinese_chars": true,
-  "tokenizer_class": "BertTokenizer",
-  "unk_token": "[UNK]"
+  "errors": "replace",
+  "mask_token": "<mask>",
+  "name_or_path": "/content/drive/MyDrive/project/kobigbird/new_model",
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "special_tokens_map_file": null,
+  "tokenizer_class": "PreTrainedTokenizerFast",
+  "unk_token": "<unk>"
 }
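The tokenizer files above now describe the KoBART fast tokenizer (PreTrainedTokenizerFast with <s>/</s>/<pad>/<mask> special tokens) rather than the earlier BertTokenizer, which is also why tokenizer.json is newly tracked via Git LFS in .gitattributes. A minimal sketch, assuming the decoder tokenizer is loaded directly from the ainize/kobart-news hub checkpoint (that loading step is not part of this commit):

from transformers import AutoTokenizer

# Loads the fast, tokenizer.json-backed tokenizer.
tokenizer = AutoTokenizer.from_pretrained("ainize/kobart-news")

print(type(tokenizer).__name__)  # expected: PreTrainedTokenizerFast, per tokenizer_config.json
print(tokenizer.pad_token)       # expected: "<pad>" (id 3 in the updated config.json)
print(tokenizer.mask_token)      # expected: "<mask>"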
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6c56219765e2b892daf6010000c5bb4b5dbf4eb6ccbb64845394bbad3b4a3353
+oid sha256:51d677c252f1fdd8ce2123964948abfe4e05ed0c9e41865a40bfc914f9fcbc79
 size 3567