taufiqdp committed
Commit 38578a1
1 parent: a333fc1

Upload folder using huggingface_hub

Files changed (38)
  1. checkpoints/checkpoint-80000/config.json +29 -0
  2. checkpoints/checkpoint-80000/model.safetensors +3 -0
  3. checkpoints/checkpoint-80000/optimizer.pt +3 -0
  4. checkpoints/checkpoint-80000/rng_state.pth +3 -0
  5. checkpoints/checkpoint-80000/scheduler.pt +3 -0
  6. checkpoints/checkpoint-80000/trainer_state.json +0 -0
  7. checkpoints/checkpoint-80000/training_args.bin +3 -0
  8. checkpoints/checkpoint-81000/config.json +29 -0
  9. checkpoints/checkpoint-81000/model.safetensors +3 -0
  10. checkpoints/checkpoint-81000/optimizer.pt +3 -0
  11. checkpoints/checkpoint-81000/rng_state.pth +3 -0
  12. checkpoints/checkpoint-81000/scheduler.pt +3 -0
  13. checkpoints/checkpoint-81000/trainer_state.json +0 -0
  14. checkpoints/checkpoint-81000/training_args.bin +3 -0
  15. checkpoints/checkpoint-82000/config.json +29 -0
  16. checkpoints/checkpoint-82000/model.safetensors +3 -0
  17. checkpoints/checkpoint-82000/optimizer.pt +3 -0
  18. checkpoints/checkpoint-82000/rng_state.pth +3 -0
  19. checkpoints/checkpoint-82000/scheduler.pt +3 -0
  20. checkpoints/checkpoint-82000/trainer_state.json +0 -0
  21. checkpoints/checkpoint-82000/training_args.bin +3 -0
  22. checkpoints/checkpoint-83000/config.json +29 -0
  23. checkpoints/checkpoint-83000/model.safetensors +3 -0
  24. checkpoints/checkpoint-83000/optimizer.pt +3 -0
  25. checkpoints/checkpoint-83000/rng_state.pth +3 -0
  26. checkpoints/checkpoint-83000/scheduler.pt +3 -0
  27. checkpoints/checkpoint-83000/trainer_state.json +0 -0
  28. checkpoints/checkpoint-83000/training_args.bin +3 -0
  29. checkpoints/checkpoint-84000/config.json +29 -0
  30. checkpoints/checkpoint-84000/model.safetensors +3 -0
  31. checkpoints/checkpoint-84000/optimizer.pt +3 -0
  32. checkpoints/checkpoint-84000/rng_state.pth +3 -0
  33. checkpoints/checkpoint-84000/scheduler.pt +3 -0
  34. checkpoints/checkpoint-84000/trainer_state.json +0 -0
  35. checkpoints/checkpoint-84000/training_args.bin +3 -0
  36. checkpoints/config.json +1 -1
  37. checkpoints/model.safetensors +1 -1
  38. checkpoints/training_args.bin +1 -1
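The commit message says the folder was pushed with huggingface_hub. A minimal sketch of the kind of upload_folder call that produces a commit like this one; the repo_id and local folder path are assumptions, not values recorded in the commit:

```python
# Hypothetical reconstruction of the upload. Only the commit message is
# taken from this commit; repo_id and folder_path are assumptions.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login`
api.upload_folder(
    folder_path="checkpoints",            # local folder to push
    path_in_repo="checkpoints",           # keep the same layout in the repo
    repo_id="taufiqdp/nusa-albert",       # assumed target repo
    commit_message="Upload folder using huggingface_hub",
)
```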
checkpoints/checkpoint-80000/config.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "/content/drive/MyDrive/Colab Notebooks/nusa-albert/checkpoint-73000",
+  "architectures": [
+    "AlbertForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0,
+  "bos_token_id": 2,
+  "classifier_dropout_prob": 0.1,
+  "embedding_size": 128,
+  "eos_token_id": 3,
+  "hidden_act": "gelu_new",
+  "hidden_dropout_prob": 0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "inner_group_num": 1,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "albert",
+  "num_attention_heads": 12,
+  "num_hidden_groups": 1,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.38.2",
+  "type_vocab_size": 2,
+  "vocab_size": 30000
+}
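This config describes a 12-layer ALBERT masked-LM with a 128-dim factorized embedding and a 30k vocabulary, resumed from checkpoint-73000. A minimal sketch of loading it with transformers, assuming the repo has been cloned locally with its LFS files fetched:

```python
# Load the config and weights of one uploaded checkpoint.
# The local path is an assumption about where the repo was cloned.
from transformers import AlbertConfig, AlbertForMaskedLM

config = AlbertConfig.from_pretrained("checkpoints/checkpoint-80000")
print(config.hidden_size, config.num_hidden_layers)  # 768 12

model = AlbertForMaskedLM.from_pretrained("checkpoints/checkpoint-80000")
```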
checkpoints/checkpoint-80000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3cf6ddbd5afb17eda6130669a24c03bbb5c39b43788c55edb229786daa0a0028
+size 44890256
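The diff above is not the weights themselves but a Git LFS pointer: the repository tracks only an object id and a byte size, while the ~45 MB safetensors blob lives in LFS storage (fetched by `git lfs pull` or the Hub download APIs). A minimal sketch of reading such a pointer, assuming a clone made without fetching LFS objects (e.g. with GIT_LFS_SKIP_SMUDGE=1) so the pointer text is still on disk:

```python
# Read a Git LFS pointer file into its key/value fields.
# The path below is an assumption; any pointer file in this commit works.
def read_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = read_lfs_pointer("checkpoints/checkpoint-80000/model.safetensors")
print(ptr["oid"], ptr["size"])  # sha256:3cf6dd... 44890256
```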
checkpoints/checkpoint-80000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:389f9903205dba4e130690e07fe4f7185bb41273dcebd5d9e0ffda5054d91270
+size 89797322
checkpoints/checkpoint-80000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:15d87bc4fc85e913083817b8e919b0c71d7b4e81800012a2cb4ec78d8eb85621
+size 14244
checkpoints/checkpoint-80000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:002a4c7f405c55d87cf31ef9286a749c831ad992649daa3c6c1a70bf0e2372ac
+size 1064
checkpoints/checkpoint-80000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
checkpoints/checkpoint-80000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c01e33ee4a28c6eaffabcba6a2b3dacd7633a3546a77f6377b600ccc9c57c79
+size 5112
checkpoints/checkpoint-81000/config.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "/content/drive/MyDrive/Colab Notebooks/nusa-albert/checkpoint-73000",
+  "architectures": [
+    "AlbertForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0,
+  "bos_token_id": 2,
+  "classifier_dropout_prob": 0.1,
+  "embedding_size": 128,
+  "eos_token_id": 3,
+  "hidden_act": "gelu_new",
+  "hidden_dropout_prob": 0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "inner_group_num": 1,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "albert",
+  "num_attention_heads": 12,
+  "num_hidden_groups": 1,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.38.2",
+  "type_vocab_size": 2,
+  "vocab_size": 30000
+}
checkpoints/checkpoint-81000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f56a11a9128d486c32bb72f93c21a83237fc2c26abe1d0ae92aecb9b063fb75
+size 44890256
checkpoints/checkpoint-81000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2caccc49fbf6fcfb8ac7bfacd885f5611986fff3d045da986ebcea6ebd787fb2
+size 89797322
checkpoints/checkpoint-81000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:095e5422d5c7f9ca3c7fd4e6ece5b81dbc6c36fe0ae0c4468906b40d9f50a57c
+size 14244
checkpoints/checkpoint-81000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ec52c640f8f66f2224c39fba9d567ccd4b41598af27911a73ae8f2a97db8c0ad
+size 1064
checkpoints/checkpoint-81000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
checkpoints/checkpoint-81000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c01e33ee4a28c6eaffabcba6a2b3dacd7633a3546a77f6377b600ccc9c57c79
+size 5112
checkpoints/checkpoint-82000/config.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "/content/drive/MyDrive/Colab Notebooks/nusa-albert/checkpoint-73000",
+  "architectures": [
+    "AlbertForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0,
+  "bos_token_id": 2,
+  "classifier_dropout_prob": 0.1,
+  "embedding_size": 128,
+  "eos_token_id": 3,
+  "hidden_act": "gelu_new",
+  "hidden_dropout_prob": 0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "inner_group_num": 1,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "albert",
+  "num_attention_heads": 12,
+  "num_hidden_groups": 1,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.38.2",
+  "type_vocab_size": 2,
+  "vocab_size": 30000
+}
checkpoints/checkpoint-82000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:845bc926a5f6f65588e067467b79ad42b31856c5d8f8cbd42c5a719c73f448c4
+size 44890256
checkpoints/checkpoint-82000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df6c4e7fb13600f84c3d31a36e7b50f7a47318c639ada99fcd299b496250bfeb
+size 89797322
checkpoints/checkpoint-82000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:74b413e9fe191f81dfa814e39c166a44946a73550ab154673015aa50520bccdc
+size 14244
checkpoints/checkpoint-82000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5c7ce3f92314c38971e1da80300c1c81e83dd179024dd7970f9933f7e8c24342
+size 1064
checkpoints/checkpoint-82000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
checkpoints/checkpoint-82000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c01e33ee4a28c6eaffabcba6a2b3dacd7633a3546a77f6377b600ccc9c57c79
+size 5112
checkpoints/checkpoint-83000/config.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "/content/drive/MyDrive/Colab Notebooks/nusa-albert/checkpoint-73000",
+  "architectures": [
+    "AlbertForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0,
+  "bos_token_id": 2,
+  "classifier_dropout_prob": 0.1,
+  "embedding_size": 128,
+  "eos_token_id": 3,
+  "hidden_act": "gelu_new",
+  "hidden_dropout_prob": 0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "inner_group_num": 1,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "albert",
+  "num_attention_heads": 12,
+  "num_hidden_groups": 1,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.38.2",
+  "type_vocab_size": 2,
+  "vocab_size": 30000
+}
checkpoints/checkpoint-83000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9429bfe4e9017d6ef6d6c92d892109f5f3c6637c53066db0ca66ed72eecf7873
+size 44890256
checkpoints/checkpoint-83000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:634c625fe0bd83b72f3dcaaf98991f58467ab9345b2729fdcccd811d2c937219
+size 89797322
checkpoints/checkpoint-83000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7958f6d8eeb2fdb7ce6471330a7d0b9b9fb1f733104a43446ece513a6ad70742
+size 14244
checkpoints/checkpoint-83000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1d05f156e2ede0b8c6c3cf6c0b13f25fde279787723df443d651ed98927915e
+size 1064
checkpoints/checkpoint-83000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
checkpoints/checkpoint-83000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c01e33ee4a28c6eaffabcba6a2b3dacd7633a3546a77f6377b600ccc9c57c79
+size 5112
checkpoints/checkpoint-84000/config.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "/content/drive/MyDrive/Colab Notebooks/nusa-albert/checkpoint-73000",
+  "architectures": [
+    "AlbertForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0,
+  "bos_token_id": 2,
+  "classifier_dropout_prob": 0.1,
+  "embedding_size": 128,
+  "eos_token_id": 3,
+  "hidden_act": "gelu_new",
+  "hidden_dropout_prob": 0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "inner_group_num": 1,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "albert",
+  "num_attention_heads": 12,
+  "num_hidden_groups": 1,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.38.2",
+  "type_vocab_size": 2,
+  "vocab_size": 30000
+}
checkpoints/checkpoint-84000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d632badc96fc12a80b7c452bd7c82d37324e338ad44248106c83baeb8f06cec9
+size 44890256
checkpoints/checkpoint-84000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:63ee400d5e367fbd3d572b1a54cb5c1cc5a2e441447449851cfcea0ab22dfebf
+size 89797322
checkpoints/checkpoint-84000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a35fe8ec5b6535891b5b1ec6c9c696b9c4e79f5efeb3e41eb457e3ae361570f
+size 14244
checkpoints/checkpoint-84000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f3942093fce43d34042d0d0f942cdddc21fa5b7379b1469fab0f2110a4bc6595
+size 1064
checkpoints/checkpoint-84000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
checkpoints/checkpoint-84000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c01e33ee4a28c6eaffabcba6a2b3dacd7633a3546a77f6377b600ccc9c57c79
+size 5112
checkpoints/config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/content/drive/MyDrive/Colab Notebooks/nusa-albert/checkpoint-54000",
+  "_name_or_path": "/content/drive/MyDrive/Colab Notebooks/nusa-albert/checkpoint-73000",
   "architectures": [
     "AlbertForMaskedLM"
   ],
checkpoints/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9bdf2140400c28602e5b9c9350668395fa8f9253f444034541608c6c018cace3
+oid sha256:d632badc96fc12a80b7c452bd7c82d37324e338ad44248106c83baeb8f06cec9
 size 44890256
checkpoints/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c3122098f6dd0b39207b2763c78d63fe9889378f88849a628ec4adff3d17a326
+oid sha256:6c01e33ee4a28c6eaffabcba6a2b3dacd7633a3546a77f6377b600ccc9c57c79
 size 5112
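Notably, training_args.bin resolves to the same LFS object (oid 6c01e3...) in every checkpoint directory, so the run's hyperparameters did not change between saves, while each checkpoint keeps its own optimizer.pt, scheduler.pt, rng_state.pth, and trainer_state.json so that Trainer can resume exactly. A minimal sketch of inspecting those arguments, with the local path an assumption about where the repo was cloned:

```python
# training_args.bin is a pickled TrainingArguments object, so it needs a
# full (not weights-only) unpickle on recent torch versions.
import torch

args = torch.load(
    "checkpoints/checkpoint-84000/training_args.bin",  # assumed local path
    weights_only=False,
)
print(args.learning_rate, args.save_steps)
```

Given a Trainer configured with the same model and data, the run could then continue via trainer.train(resume_from_checkpoint="checkpoints/checkpoint-84000"), which is what the optimizer, scheduler, and RNG state files in each checkpoint exist for.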