SauravMaheshkar committed
Commit 15f7be7
1 Parent(s): 42adabb

Initial Commit

.gitignore ADDED
@@ -0,0 +1,2 @@
+ # MacOS Stuff
+ **/.DS_Store
config.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "_name_or_path": "SauravMaheshkar/distilbert-base-uncased-distilled-chaii",
+   "activation": "gelu",
+   "architectures": [
+     "DistilBertForQuestionAnswering"
+   ],
+   "attention_dropout": 0.1,
+   "dim": 768,
+   "dropout": 0.1,
+   "hidden_dim": 3072,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "distilbert",
+   "n_heads": 12,
+   "n_layers": 6,
+   "pad_token_id": 0,
+   "qa_dropout": 0.1,
+   "seq_classif_dropout": 0.2,
+   "sinusoidal_pos_embds": false,
+   "tie_weights_": true,
+   "transformers_version": "4.6.1",
+   "vocab_size": 30522
+ }
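For context, a minimal sketch of how this config and checkpoint are typically consumed with the transformers library. It assumes transformers and a torch backend are installed; the hub id is taken from "_name_or_path" above.

from transformers import AutoConfig, AutoModelForQuestionAnswering

# Hub id from "_name_or_path" in the config above
model_id = "SauravMaheshkar/distilbert-base-uncased-distilled-chaii"

config = AutoConfig.from_pretrained(model_id)  # parses this config.json
print(config.model_type, config.n_layers, config.n_heads)  # distilbert 6 12

# Instantiates DistilBertForQuestionAnswering and loads pytorch_model.bin
model = AutoModelForQuestionAnswering.from_pretrained(model_id)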
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7543991fd9f57c6a02edae448c87fe76739fba1cc8bc23d28c1af9ca8c9b1fa9
+ size 530979419
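The .pt/.bin/.pth entries below are Git LFS pointer files, not the binaries themselves: each records the spec version, a sha256 object id, and the byte size of the real blob. A minimal standard-library sketch of checking a downloaded blob against its pointer (the local path is a hypothetical example):

import hashlib
import os

def verify_lfs_blob(blob_path: str, expected_oid: str, expected_size: int) -> bool:
    """Check a downloaded file against the oid/size from its LFS pointer."""
    if os.path.getsize(blob_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# Values from the optimizer.pt pointer above
print(verify_lfs_blob(
    "optimizer.pt",
    "7543991fd9f57c6a02edae448c87fe76739fba1cc8bc23d28c1af9ca8c9b1fa9",
    530979419,
))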
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63242105f9523b3cf3835386d7f72871a5fd2aa2284fa090e0edfc4718925dc6
+ size 265500101
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e2563e3e24007c57efddd36843ab3bd7c9bc2167831ac8715bb770b46dd36ca
+ size 14657
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:18132446c5dbcce34518f3b4224f48ebb3dfcfecc009681bece771b03cef4533
+ size 623
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "special_tokens_map_file": null, "name_or_path": "SauravMaheshkar/distilbert-base-uncased-distilled-chaii"}
trainer_state.json ADDED
@@ -0,0 +1,47 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 0.9998933560840354,
+   "global_step": 2344,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.21,
+       "learning_rate": 1.7486960644855383e-05,
+       "loss": 1.4078,
+       "step": 500
+     },
+     {
+       "epoch": 0.43,
+       "learning_rate": 1.2745376955903272e-05,
+       "loss": 1.1637,
+       "step": 1000
+     },
+     {
+       "epoch": 0.64,
+       "learning_rate": 8.003793266951162e-06,
+       "loss": 1.0799,
+       "step": 1500
+     },
+     {
+       "epoch": 0.85,
+       "learning_rate": 3.262209577999052e-06,
+       "loss": 1.0642,
+       "step": 2000
+     },
+     {
+       "epoch": 1.0,
+       "eval_loss": 1.197797417640686,
+       "eval_runtime": 2.2647,
+       "eval_samples_per_second": 137.328,
+       "step": 2344
+     }
+   ],
+   "max_steps": 2344,
+   "num_train_epochs": 1,
+   "total_flos": 5734497329676288.0,
+   "trial_name": null,
+   "trial_params": null
+ }
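The training curve recorded above (train loss 1.41 → 1.06 over 2344 steps of a single epoch, eval loss ≈ 1.20 at the end) can be pulled straight out of this file. A minimal sketch using only the standard library, assuming a local copy named trainer_state.json:

import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry a "loss" key; the final eval entry carries "eval_loss"
for entry in state["log_history"]:
    if "loss" in entry:
        print(f"step {entry['step']:>4}: train loss {entry['loss']:.4f}")
    elif "eval_loss" in entry:
        print(f"step {entry['step']:>4}: eval loss  {entry['eval_loss']:.4f}")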
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7dbeb59108bb5692a34bf4e36cc0a916ac4a9f6587864a935a40e9e7c4e3f126
+ size 2415
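training_args.bin is a small pickled TrainingArguments object saved by the Trainer run. A sketch of inspecting it, assuming transformers (so the class can be unpickled) and torch are installed:

import torch

# weights_only=False is needed on recent torch to unpickle arbitrary objects
# (drop the kwarg on older torch); only do this for files you trust.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__, args.num_train_epochs, args.learning_rate)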
vocab.txt ADDED
The diff for this file is too large to render. See raw diff