dima806 committed
Commit: e4da66e
1 Parent(s): 4944497

Upload folder using huggingface_hub

checkpoint-17578/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "_name_or_path": "distilbert-base-cased",
+   "activation": "gelu",
+   "architectures": [
+     "DistilBertForSequenceClassification"
+   ],
+   "attention_dropout": 0.1,
+   "dim": 768,
+   "dropout": 0.1,
+   "hidden_dim": 3072,
+   "id2label": {
+     "0": "ham",
+     "1": "spam"
+   },
+   "initializer_range": 0.02,
+   "label2id": {
+     "ham": 0,
+     "spam": 1
+   },
+   "max_position_embeddings": 512,
+   "model_type": "distilbert",
+   "n_heads": 12,
+   "n_layers": 6,
+   "output_past": true,
+   "pad_token_id": 0,
+   "qa_dropout": 0.1,
+   "seq_classif_dropout": 0.2,
+   "sinusoidal_pos_embds": false,
+   "tie_weights_": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.35.0",
+   "vocab_size": 28996
+ }
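
The config above defines a two-label DistilBERT sequence classifier (ham vs. spam) fine-tuned from distilbert-base-cased. A minimal inference sketch, assuming a local copy of the checkpoint folder; the paths are illustrative, and the base model's tokenizer is assumed since the checkpoint folder itself carries no tokenizer files:

```python
# Minimal sketch, not the author's script: classify a message with this checkpoint.
# "checkpoint-17578" is assumed to be a local copy of the folder in this commit;
# the tokenizer comes from the base model, whose cased vocab (28996 entries)
# matches "vocab_size" in the config above.
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model = AutoModelForSequenceClassification.from_pretrained("checkpoint-17578")
tokenizer = AutoTokenizer.from_pretrained("distilbert-base-cased")

inputs = tokenizer(
    "Congratulations, you won a free prize! Click here.",
    return_tensors="pt",
    truncation=True,
    max_length=512,  # matches "max_position_embeddings"
)
logits = model(**inputs).logits
pred = logits.argmax(dim=-1).item()
print(model.config.id2label[pred])  # "ham" or "spam", per the id2label map above
```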
checkpoint-17578/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b844261a573b3a7ab332980931b4fdfba8c8ac50cdcf489ae45c4381c0df831
+ size 263144680
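
The three-line block above is not the weights file itself but a Git LFS pointer: a spec-version line, a sha256 object id, and the blob's byte size; the actual blob lives in LFS storage. As a sketch of what those fields are for (file names are illustrative), a downloaded blob can be checked against its pointer like this:

```python
# Sketch: verify a downloaded blob against its git-lfs pointer file.
# Paths are illustrative; pass whatever files you actually have.
import hashlib
import os

def parse_lfs_pointer(path: str) -> dict:
    """Parse a git-lfs spec/v1 pointer into its key/value fields."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def verify_blob(pointer_path: str, blob_path: str) -> None:
    fields = parse_lfs_pointer(pointer_path)
    expected = fields["oid"].removeprefix("sha256:")
    if os.path.getsize(blob_path) != int(fields["size"]):
        raise ValueError("size mismatch")
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    if digest.hexdigest() != expected:
        raise ValueError("sha256 mismatch")

# verify_blob("model.safetensors.pointer", "model.safetensors")
```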
checkpoint-17578/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e101c842cac01819a9829cb67a192ba63a3f5d903affa6852a8da0fb7b51a82c
+ size 526351109
checkpoint-17578/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:412324766b3401ecc727285913881212414b32ee9aa655b78d9f399ef7419d00
+ size 14575
checkpoint-17578/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca54ecfbd3e2d6d1ac872bf413d0e047f9604847fd6dce9ea5c6d13336eb4f60
+ size 627
checkpoint-17578/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
checkpoint-17578/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8b2f8de67f161a076ffd902aa4fef3e91405678e82cbb96df7d594daf993082
+ size 4091
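
training_args.bin is the pickled TrainingArguments object the Trainer serializes with each checkpoint, while optimizer.pt, scheduler.pt, and rng_state.pth above hold the optimizer, LR-scheduler, and RNG states needed for an exact resume via `trainer.train(resume_from_checkpoint="checkpoint-17578")`. A hedged sketch of inspecting the saved arguments, assuming transformers is installed so the pickle can be resolved:

```python
# Sketch: peek at the hyperparameters stored in training_args.bin.
# Requires transformers installed, since the pickle references TrainingArguments.
import torch

args = torch.load("checkpoint-17578/training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```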
config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "_name_or_path": "distilbert-base-cased",
+   "activation": "gelu",
+   "architectures": [
+     "DistilBertForSequenceClassification"
+   ],
+   "attention_dropout": 0.1,
+   "dim": 768,
+   "dropout": 0.1,
+   "hidden_dim": 3072,
+   "id2label": {
+     "0": "ham",
+     "1": "spam"
+   },
+   "initializer_range": 0.02,
+   "label2id": {
+     "ham": 0,
+     "spam": 1
+   },
+   "max_position_embeddings": 512,
+   "model_type": "distilbert",
+   "n_heads": 12,
+   "n_layers": 6,
+   "output_past": true,
+   "pad_token_id": 0,
+   "qa_dropout": 0.1,
+   "seq_classif_dropout": 0.2,
+   "sinusoidal_pos_embds": false,
+   "tie_weights_": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.35.0",
+   "vocab_size": 28996
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b844261a573b3a7ab332980931b4fdfba8c8ac50cdcf489ae45c4381c0df831
+ size 263144680
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8b2f8de67f161a076ffd902aa4fef3e91405678e82cbb96df7d594daf993082
+ size 4091
vocab.txt ADDED
The diff for this file is too large to render. See raw diff
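
With config.json, model.safetensors, and vocab.txt all present at the repo root, the top level loads as a complete model. A short usage sketch, assuming the tokenizer can be built from vocab.txt alone (the path "." stands in for a local checkout of this repo; a Hub repo id would work the same way):

```python
# Sketch: run the root-level files as a text-classification pipeline.
# "." assumes the current directory is a checkout of this repo; replace it
# with the repo id on the Hub if loading remotely.
from transformers import pipeline

classifier = pipeline("text-classification", model=".")
print(classifier("Meeting moved to 3pm, see you there."))
# e.g. [{"label": "ham", "score": ...}]
```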