ByungjunKim committed
Commit 32566a5
1 Parent(s): 4985d7d

End of training

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "_name_or_path": "distilbert-base-uncased",
+ "activation": "gelu",
+ "architectures": [
+ "DistilBertForMaskedLM"
+ ],
+ "attention_dropout": 0.1,
+ "dim": 768,
+ "dropout": 0.1,
+ "hidden_dim": 3072,
+ "initializer_range": 0.02,
+ "max_position_embeddings": 512,
+ "model_type": "distilbert",
+ "n_heads": 12,
+ "n_layers": 6,
+ "pad_token_id": 0,
+ "qa_dropout": 0.1,
+ "seq_classif_dropout": 0.2,
+ "sinusoidal_pos_embds": false,
+ "tie_weights_": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.23.1",
+ "vocab_size": 30522
+ }
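
This is a standard DistilBERT masked-LM config pointing back at distilbert-base-uncased. A minimal loading sketch, assuming transformers is installed and this repo has been cloned locally (the Hub repo id is not shown in this diff, so a local path is used; no tokenizer files appear in the commit, so the base model's tokenizer is assumed):

```python
from transformers import AutoConfig, AutoModelForMaskedLM, AutoTokenizer

# Load config.json and pytorch_model.bin from a local clone of this repo.
# "." assumes the script runs from the repo root; substitute the Hub repo id if known.
config = AutoConfig.from_pretrained(".")
model = AutoModelForMaskedLM.from_pretrained(".", config=config)

# The diff adds no tokenizer files, so fall back to the base model's tokenizer.
tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")

inputs = tokenizer("Paris is the [MASK] of France.", return_tensors="pt")
logits = model(**inputs).logits  # shape: (1, seq_len, 30522)
```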
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d758116aa088086fdaee3bf3bb23fd0af9060f61d788d8c2cd5918f2dd5dfbe2
+ size 267976815
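
The weights are committed as a Git LFS pointer (version/oid/size) rather than raw bytes; after `git lfs pull`, the downloaded file can be checked against the oid and size recorded above. A small verification sketch, assuming the file sits at ./pytorch_model.bin:

```python
import hashlib
import os

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so the ~268 MB checkpoint never sits fully in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid and size copied from the LFS pointer above.
expected = "d758116aa088086fdaee3bf3bb23fd0af9060f61d788d8c2cd5918f2dd5dfbe2"
assert os.path.getsize("pytorch_model.bin") == 267976815
assert sha256_of("pytorch_model.bin") == expected
```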
runs/Oct28_01-27-42_7cc6df682810/1666920538.3494058/events.out.tfevents.1666920538.7cc6df682810.78.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:34c4801375521d5a6e9d698cd41f7642fffb4bb88ac4d7c29fc7691c20662f6d
+ size 5532
runs/Oct28_01-27-42_7cc6df682810/events.out.tfevents.1666920531.7cc6df682810.78.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0bda4fb08022d59727be4909cddce022c30c98e2054204262ae99e2af703f105
+ size 5497
runs/Oct28_01-27-42_7cc6df682810/events.out.tfevents.1666920701.7cc6df682810.78.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e74693783b39349d65d3d56f9cf2228b0d55c6827ac0c42c14d120fc4b399ef9
+ size 311
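
The three runs/ files are TensorBoard event logs written during training (the 311-byte one is likely the end-of-training summary). Once the LFS objects are pulled, they can be read back programmatically; a sketch, assuming the tensorboard package is installed and the repo is the working directory:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point at the run directory committed above and read back its scalar summaries.
acc = EventAccumulator("runs/Oct28_01-27-42_7cc6df682810")
acc.Reload()
for tag in acc.Tags()["scalars"]:  # e.g. train/loss, train/learning_rate
    points = acc.Scalars(tag)
    print(tag, [(p.step, p.value) for p in points][:3])
```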
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e99d00182df421e2178db31a9cc73cde1d54efd7e348c4e9cb0d6d3267fa0f2
+ size 3439
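
training_args.bin is the pickled transformers.TrainingArguments object that the Trainer saves alongside the model, so the exact hyperparameters can be recovered from it. A sketch, assuming torch and transformers are both importable and the LFS object has been pulled:

```python
import torch

# TrainingArguments is an arbitrary pickle, so transformers must be importable
# at load time; on newer PyTorch, weights_only=False is required for pickles.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```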