NahedAbdelgaber committed on
Commit
0bcf313
1 Parent(s): e41c370

Training in progress, epoch 1

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,86 @@
+ {
+ "_name_or_path": "distilbert-base-uncased",
+ "activation": "gelu",
+ "architectures": [
+ "DistilBertForTokenClassification"
+ ],
+ "attention_dropout": 0.1,
+ "dim": 768,
+ "dropout": 0.1,
+ "hidden_dim": 3072,
+ "id2label": {
+ "0": "O",
+ "1": "B-Lead",
+ "2": "I-Lead",
+ "3": "L-Lead",
+ "4": "U-Lead",
+ "5": "B-Position",
+ "6": "I-Position",
+ "7": "L-Position",
+ "8": "U-Position",
+ "9": "B-Concluding Statement",
+ "10": "I-Concluding Statement",
+ "11": "L-Concluding Statement",
+ "12": "U-Concluding Statement",
+ "13": "B-Counterclaim",
+ "14": "I-Counterclaim",
+ "15": "L-Counterclaim",
+ "16": "U-Counterclaim",
+ "17": "B-Rebuttal",
+ "18": "I-Rebuttal",
+ "19": "L-Rebuttal",
+ "20": "U-Rebuttal",
+ "21": "B-Claim",
+ "22": "I-Claim",
+ "23": "L-Claim",
+ "24": "U-Claim",
+ "25": "B-Evidence",
+ "26": "I-Evidence",
+ "27": "L-Evidence",
+ "28": "U-Evidence"
+ },
+ "initializer_range": 0.02,
+ "label2id": {
+ "B-Claim": 21,
+ "B-Concluding Statement": 9,
+ "B-Counterclaim": 13,
+ "B-Evidence": 25,
+ "B-Lead": 1,
+ "B-Position": 5,
+ "B-Rebuttal": 17,
+ "I-Claim": 22,
+ "I-Concluding Statement": 10,
+ "I-Counterclaim": 14,
+ "I-Evidence": 26,
+ "I-Lead": 2,
+ "I-Position": 6,
+ "I-Rebuttal": 18,
+ "L-Claim": 23,
+ "L-Concluding Statement": 11,
+ "L-Counterclaim": 15,
+ "L-Evidence": 27,
+ "L-Lead": 3,
+ "L-Position": 7,
+ "L-Rebuttal": 19,
+ "O": 0,
+ "U-Claim": 24,
+ "U-Concluding Statement": 12,
+ "U-Counterclaim": 16,
+ "U-Evidence": 28,
+ "U-Lead": 4,
+ "U-Position": 8,
+ "U-Rebuttal": 20
+ },
+ "max_position_embeddings": 512,
+ "model_type": "distilbert",
+ "n_heads": 12,
+ "n_layers": 6,
+ "pad_token_id": 0,
+ "qa_dropout": 0.1,
+ "seq_classif_dropout": 0.2,
+ "sinusoidal_pos_embds": false,
+ "tie_weights_": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.12.5",
+ "vocab_size": 30522
+ }
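The config above defines a 29-way token-classification head ("O" plus B/I/L/U tags for seven discourse-element types) on top of distilbert-base-uncased. A minimal loading sketch, not part of this commit, assuming the repository id recorded later in tokenizer_config.json (NahedAbdelgaber/evaluating-student-writing-distibert-ner) refers to this repo:

```python
from transformers import AutoConfig, AutoModelForTokenClassification, AutoTokenizer

# Repository id taken from tokenizer_config.json in this commit (assumed to be this repo)
repo_id = "NahedAbdelgaber/evaluating-student-writing-distibert-ner"

config = AutoConfig.from_pretrained(repo_id)
print(config.num_labels)   # 29: "O" plus B/I/L/U tags for 7 discourse types
print(config.id2label[1])  # "B-Lead" (transformers converts id2label keys to int on load)

model = AutoModelForTokenClassification.from_pretrained(repo_id)
tokenizer = AutoTokenizer.from_pretrained(repo_id)
```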
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8937dcaf1aa5419fa2ec95abb77b8551dda1836333317331ca35503b6761f2c9
+ size 265580085
runs/Jan06_05-25-51_5f3ab8275dd7/1641446791.2227511/events.out.tfevents.1641446791.5f3ab8275dd7.34.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31ca05691923cfd1ecc94457d2eb0b6b44a831e31260e9f05252e3f30145853e
+ size 4588
runs/Jan06_05-25-51_5f3ab8275dd7/events.out.tfevents.1641446791.5f3ab8275dd7.34.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb3eaf58e9543291fb62bb26a51f01383e704a6e595e1bf517d463f1d01d8484
+ size 5264
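The two files under runs/ are TensorBoard event logs for this epoch, stored as Git LFS pointers. A rough inspection sketch using TensorBoard's EventAccumulator, assuming the LFS objects have been fetched locally and that scalar tags (e.g. training loss) were logged:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Directory containing the events.out.tfevents.* file committed above
log_dir = "runs/Jan06_05-25-51_5f3ab8275dd7"

acc = EventAccumulator(log_dir)
acc.Reload()                         # parse the event file(s) in the directory
scalar_tags = acc.Tags()["scalars"]  # whatever scalar series the Trainer logged
for tag in scalar_tags:
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)
```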
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "return_offsets_mapping": true, "truncation": true, "max_length": 512, "padding": "max_length", "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "NahedAbdelgaber/evaluating-student-writing-distibert-ner", "tokenizer_class": "DistilBertTokenizer"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2a1334c5f838cd69fb6d299ab45fc05737987ff48b371618aec1407768edb83
+ size 2863
vocab.txt ADDED
The diff for this file is too large to render. See raw diff