itskamran committed
Commit 76c3d2d
1 Parent(s): aca3402

Training in progress, epoch 1

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,43 @@
+ {
+   "_name_or_path": "facebook/esm2_t6_8M_UR50D",
+   "architectures": [
+     "EsmForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "classifier_dropout": null,
+   "emb_layer_norm_before": false,
+   "esmfold_config": null,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.0,
+   "hidden_size": 320,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2",
+     "3": "LABEL_3"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 1280,
+   "is_folding_model": false,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2,
+     "LABEL_3": 3
+   },
+   "layer_norm_eps": 1e-05,
+   "mask_token_id": 32,
+   "max_position_embeddings": 1026,
+   "model_type": "esm",
+   "num_attention_heads": 20,
+   "num_hidden_layers": 6,
+   "pad_token_id": 1,
+   "position_embedding_type": "rotary",
+   "problem_type": "single_label_classification",
+   "token_dropout": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.31.0",
+   "use_cache": true,
+   "vocab_list": null,
+   "vocab_size": 33
+ }
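
For orientation, a minimal sketch of loading this fine-tuned ESM-2 classifier with Transformers. The local directory name and the example protein sequence are placeholder assumptions (they are not part of this commit); the four classes (LABEL_0 to LABEL_3) and the facebook/esm2_t6_8M_UR50D backbone come straight from the config.json above.

```python
# Sketch only: assumes the files from this commit are available locally in
# ./esm2-epoch1 (placeholder path) and that transformers>=4.31 and torch are installed.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_dir = "./esm2-epoch1"  # placeholder, not part of this commit
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForSequenceClassification.from_pretrained(model_dir)
model.eval()

# config.json above defines a 4-way single-label classifier on top of the
# 6-layer, 320-hidden-size ESM-2 8M backbone.
sequence = "MKTAYIAKQRQISFVKSHFSRQLEERLGLIEVQ"  # arbitrary example protein sequence
inputs = tokenizer(sequence, return_tensors="pt", truncation=True, max_length=1024)
with torch.no_grad():
    logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(dim=-1).item()])  # e.g. "LABEL_0"
```

The checkpoint as committed keeps the generic LABEL_0 to LABEL_3 names; meaningful class names would replace them in id2label/label2id once defined.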
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c3b95b68842dff3b739e11c54fd5b85664771d75d2627b6b45dffa8b2fca832
+ size 31401613
runs/Aug18_13-03-09_ef8513458a55/events.out.tfevents.1692363795.ef8513458a55.17600.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e0de9005e82abd738719f9f6aaf1a82485462134fadbd79db6f8ae867ea0480b
+ size 8280
runs/Aug18_13-08-24_ef8513458a55/events.out.tfevents.1692364108.ef8513458a55.20023.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:274d609bfeb6438c6992c63f431bbb5d7389884de51f3559e95809d7d1dca244
+ size 4431
runs/Aug18_13-21-45_ef8513458a55/events.out.tfevents.1692364908.ef8513458a55.20023.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a983f7b2cf97c1489e84915e65700c0ee198847e3a561523474eace6a05ef46e
+ size 4184
runs/Aug18_13-26-54_ef8513458a55/events.out.tfevents.1692365218.ef8513458a55.24931.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c4bc3935afd95d4828b6572739c59602902fa83c45972047fc8546a35445630d
+ size 4748
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "<cls>",
+   "eos_token": "<eos>",
+   "mask_token": "<mask>",
+   "pad_token": "<pad>",
+   "unk_token": "<unk>"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "clean_up_tokenization_spaces": true,
+   "model_max_length": 1024,
+   "tokenizer_class": "EsmTokenizer"
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc1b5527ff23e4af81c862ddfb5900c20dcba75c221877c6612817fdb8639c00
+ size 4027
vocab.txt ADDED
@@ -0,0 +1,33 @@
+ <cls>
+ <pad>
+ <eos>
+ <unk>
+ L
+ A
+ G
+ V
+ S
+ E
+ R
+ T
+ I
+ D
+ P
+ K
+ Q
+ N
+ F
+ Y
+ M
+ H
+ W
+ C
+ X
+ B
+ U
+ Z
+ O
+ .
+ -
+ <null_1>
+ <mask>
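
As a quick check of the tokenizer files added in this commit (tokenizer_config.json, special_tokens_map.json, vocab.txt), a minimal sketch of how EsmTokenizer encodes a protein sequence; the directory path and the short example sequence are placeholder assumptions.

```python
# Sketch only: assumes the tokenizer files from this commit sit in ./esm2-epoch1
# (placeholder path). EsmTokenizer tokenizes per amino-acid character using vocab.txt
# and wraps the sequence in <cls> ... <eos>, truncating at model_max_length (1024).
from transformers import EsmTokenizer

tokenizer = EsmTokenizer.from_pretrained("./esm2-epoch1")
encoded = tokenizer("MKTAYIAKQR", truncation=True)
print(tokenizer.convert_ids_to_tokens(encoded["input_ids"]))
# expected: ['<cls>', 'M', 'K', 'T', 'A', 'Y', 'I', 'A', 'K', 'Q', 'R', '<eos>']
```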