Sayan01 committed on
Commit
7de8d8d
1 Parent(s): f97da7b

Training in progress, epoch 1

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,42 @@
+ {
+ "_name_or_path": "distilroberta-base",
+ "adapters": {
+ "adapters": {},
+ "config_map": {},
+ "fusion_config_map": {},
+ "fusions": {}
+ },
+ "architectures": [
+ "RobertaForSequenceClassification"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": 0,
+ "classifier_dropout": null,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "entailment",
+ "1": "not_entailment"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "entailment": "0",
+ "not_entailment": "1"
+ },
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 514,
+ "model_type": "roberta",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 6,
+ "pad_token_id": 1,
+ "position_embedding_type": "absolute",
+ "problem_type": "single_label_classification",
+ "torch_dtype": "float32",
+ "transformers_version": "4.21.3",
+ "type_vocab_size": 1,
+ "use_cache": true,
+ "vocab_size": 50265
+ }
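The config above sets up distilroberta-base (6 hidden layers, hidden size 768) as a single-label classifier over two classes, entailment and not_entailment. A minimal sketch of loading the resulting checkpoint with the transformers library, where "path/to/this/repo" is a placeholder for a local clone or the actual Hub repo id (not shown on this page):

```python
# Minimal sketch: load the checkpoint described by config.json above.
# "path/to/this/repo" is a placeholder; substitute the real path or repo id.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model = AutoModelForSequenceClassification.from_pretrained("path/to/this/repo")
tokenizer = AutoTokenizer.from_pretrained("path/to/this/repo")

# id2label in config.json maps 0 -> "entailment", 1 -> "not_entailment".
inputs = tokenizer("The cat sat on the mat.", "A cat is on a mat.",
                   return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(-1).item()])
```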
logs/1664212651.4820461/events.out.tfevents.1664212651.fd84fa762179 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5940da50bccd1d974da70d2593ae61c0bb70d81b2f5d50b9f25c4526fe3ebc88
+ size 5231
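This file, like the other binaries in the commit, is tracked with Git LFS: the repository stores only a three-line pointer (spec version, sha256 object id, byte size), and the actual blob lives in LFS storage. A minimal sketch of reading such a pointer, assuming only the three-line format shown above:

```python
# Minimal sketch: parse a Git LFS pointer file into its three fields.
# Assumes the "version / oid / size" layout shown in the diff above.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

# fields["oid"] is "sha256:<digest>"; fields["size"] is the byte count
# of the real blob, not of the pointer file itself.
```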
logs/1664264901.6054327/events.out.tfevents.1664264901.fd84fa762179 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:644d579a6735ed9ff90fcd2d61ad260e559abe42846797601b785f618349ac4e
+ size 5231
logs/1664265030.030459/events.out.tfevents.1664265030.fd84fa762179 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9926c87e95c46939a7db7863d512dba477e3ecbbf260bd4ed54aed21c929146e
+ size 5231
logs/events.out.tfevents.1664212590.fd84fa762179 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af8a3b4ab201867e793a81057c41057da2d6a86d0414a9d770018058854fd52c
+ size 4273
logs/events.out.tfevents.1664264875.fd84fa762179 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eac4a709a4fc035a9d6c912ef49669bab1ea9d1618c712a1127f4e1a663c1d89
+ size 4272
logs/events.out.tfevents.1664265019.fd84fa762179 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96002e16a440304b33ea991f4a2f0346823eed6c13607f2191cc0b0d4dadc80c
+ size 4429
merges.txt ADDED
The diff for this file is too large to render. See raw diff
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:23b1ad560716e4708bf22a933a0d875523271f71d866c21589cbc8be5a3a179d
+ size 328518765
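As a rough sanity check (assuming the weights are stored as float32, which config.json's "torch_dtype" states, and distilroberta-base's roughly 82M parameters, which this commit does not state), the file size is consistent with the model described above:

```python
# Rough sanity check (assumption: 4 bytes per float32 weight).
size_bytes = 328_518_765      # from the LFS pointer above
print(size_bytes / 4 / 1e6)   # ~82.1M parameters, consistent with a
                              # 6-layer distilroberta-base + classifier head
```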
special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
+ {
+ "bos_token": "<s>",
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "mask_token": {
+ "content": "<mask>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<pad>",
+ "sep_token": "</s>",
+ "unk_token": "<unk>"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
tokenizer_config.json ADDED
@@ -0,0 +1,16 @@
+ {
+ "add_prefix_space": false,
+ "bos_token": "<s>",
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "errors": "replace",
+ "mask_token": "<mask>",
+ "model_max_length": 1024,
+ "name_or_path": "facebook/bart-base",
+ "pad_token": "<pad>",
+ "sep_token": "</s>",
+ "special_tokens_map_file": null,
+ "tokenizer_class": "BartTokenizer",
+ "trim_offsets": true,
+ "unk_token": "<unk>"
+ }
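Note that tokenizer_config.json records "facebook/bart-base" and BartTokenizer even though the model config is distilroberta-base; this appears workable because BART and RoBERTa share the same 50,265-token byte-level BPE vocabulary (vocab.json plus merges.txt). A minimal sketch of loading the tokenizer as saved here, with the same placeholder path as above:

```python
# Minimal sketch: AutoTokenizer reads tokenizer_config.json above and
# instantiates a BartTokenizer; "path/to/this/repo" is a placeholder.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this/repo")
enc = tokenizer("A premise.", "A hypothesis.")
print(tokenizer.convert_ids_to_tokens(enc["input_ids"]))
# Sentence pairs come out as: <s> premise </s></s> hypothesis </s>
```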
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee649652e860af38b796d5619517e87e5c178965ed2519e3e4a8d9517c20d9be
+ size 3439
vocab.json ADDED
The diff for this file is too large to render. See raw diff