ngchuchi committed
Commit 6b21758
1 Parent(s): dc69543

Training in progress, epoch 1

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "abhi1nandy2/EManuals_RoBERTa",
+  "_name_or_path": "deepset/roberta-base-squad2",
   "architectures": [
     "RobertaForQuestionAnswering"
   ],
@@ -13,15 +13,16 @@
   "hidden_size": 768,
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "language": "english",
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
   "model_type": "roberta",
+  "name": "Roberta",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "total_flos": 4462005349122048000,
   "transformers_version": "4.38.2",
   "type_vocab_size": 1,
   "use_cache": true,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:405f9f20126efe5951563e9769ea425e1a0c47b1d41698062ce8607eb11c4e86
+oid sha256:3731a35b2915b6ee9f04273d3aa7b898fea14da2b2169b998cf9841ec15cb34c
 size 496250232
runs/Mar28_07-55-42_d9913f6e95b9/events.out.tfevents.1711612551.d9913f6e95b9.995.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:73999489c5f105764494a21d09bb40d8684f46efa4b64c64f211c34859fb4ad6
+size 5116
special_tokens_map.json CHANGED
@@ -2,49 +2,49 @@
   "bos_token": {
     "content": "<s>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "cls_token": {
     "content": "<s>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
     "content": "</s>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "mask_token": {
     "content": "<mask>",
     "lstrip": true,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "pad_token": {
     "content": "<pad>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "sep_token": {
     "content": "</s>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   }
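Note: every special token's "normalized" flag flips from false to true here (and again in tokenizer.json and tokenizer_config.json below), presumably reflecting how the new base tokenizer stores its special tokens. In the tokenizers library this flag controls whether the token is matched against the normalized form of the input rather than the raw text. A small sketch of the mask-token entry expressed with the AddedToken API that these JSON objects serialize; field values are copied from the new file.

```python
from tokenizers import AddedToken

# The <mask> entry from the new special_tokens_map.json, written out with the
# AddedToken API (sketch; values taken from the file above).
mask_token = AddedToken(
    "<mask>",
    lstrip=True,        # as in the file
    rstrip=False,
    single_word=False,
    normalized=True,    # flipped from false in this commit
)
```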
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 384,
+    "strategy": "OnlySecond",
+    "stride": 128
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 384
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 1,
+    "pad_type_id": 0,
+    "pad_token": "<pad>"
+  },
   "added_tokens": [
     {
       "id": 0,
@@ -9,7 +23,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized": false,
+      "normalized": true,
       "special": true
     },
     {
@@ -18,7 +32,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized": false,
+      "normalized": true,
       "special": true
     },
     {
@@ -27,7 +41,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized": false,
+      "normalized": true,
       "special": true
     },
     {
@@ -36,7 +50,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized": false,
+      "normalized": true,
       "special": true
     },
     {
@@ -45,7 +59,7 @@
       "single_word": false,
       "lstrip": true,
       "rstrip": false,
-      "normalized": false,
+      "normalized": true,
       "special": true
     }
   ],
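Note: besides the same "normalized" flips, this diff bakes a truncation/padding state into tokenizer.json: only-second truncation at 384 tokens with a stride of 128, and fixed padding to 384 using "<pad>" (id 1). That is the state a SQuAD-style preprocessing call leaves on a fast tokenizer, which is plausibly how it ended up serialized here. A sketch of such a call follows; the question/context strings are invented for illustration.

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("deepset/roberta-base-squad2")

# QA preprocessing call whose settings match the serialized state above
# (example strings are invented; only the keyword arguments map to the file).
encoded = tokenizer(
    "What does the blinking light mean?",      # question -> first sequence
    "A blinking light usually indicates ...",  # context  -> second sequence
    truncation="only_second",   # "strategy": "OnlySecond"
    max_length=384,             # "max_length": 384 / "Fixed": 384
    stride=128,                 # "stride": 128
    padding="max_length",       # fixed-length padding with "<pad>" (pad_id 1)
    return_overflowing_tokens=True,
    return_offsets_mapping=True,
)
print(len(encoded["input_ids"][0]))  # 384
```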
tokenizer_config.json CHANGED
@@ -4,7 +4,7 @@
     "0": {
       "content": "<s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -12,7 +12,7 @@
     "1": {
       "content": "<pad>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -20,7 +20,7 @@
     "2": {
       "content": "</s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -28,7 +28,7 @@
     "3": {
       "content": "<unk>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -36,7 +36,7 @@
     "50264": {
       "content": "<mask>",
       "lstrip": true,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -45,6 +45,7 @@
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
   "cls_token": "<s>",
+  "do_lower_case": false,
   "eos_token": "</s>",
   "errors": "replace",
   "full_tokenizer_file": null,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9037fee9cb00f098d96840488874d714950f61fd75a4b721c70c02e09ab57635
+oid sha256:67c16a3fe2aaab2b0dd8dbbfbc43ebd8046821b0fdf0104aa3eb08f5b83dd867
 size 4856