philschmid committed on
Commit
f598453
1 Parent(s): 389a501

Training in progress, epoch 1

config.json CHANGED
@@ -1,35 +1,55 @@
  {
- "_name_or_path": "google-bert/bert-base-uncased",
+ "_name_or_path": "answerdotai/ModernBERT-base",
  "architectures": [
- "BertForSequenceClassification"
+ "ModernBertForSequenceClassification"
  ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 50281,
+ "classifier_activation": "gelu",
+ "classifier_bias": false,
+ "classifier_dropout": 0.0,
+ "classifier_pooling": "mean",
+ "cls_token_id": 50281,
+ "decoder_bias": true,
+ "deterministic_flash_attn": false,
+ "embedding_dropout": 0.0,
+ "eos_token_id": 50282,
+ "global_attn_every_n_layers": 3,
+ "global_rope_theta": 160000.0,
  "gradient_checkpointing": false,
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
+ "hidden_activation": "gelu",
  "hidden_size": 768,
  "id2label": {
  "0": "small_llm",
  "1": "large_llm"
  },
+ "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
- "intermediate_size": 3072,
+ "intermediate_size": 1152,
  "label2id": {
  "large_llm": "1",
  "small_llm": "0"
  },
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
+ "layer_norm_eps": 1e-05,
+ "local_attention": 128,
+ "local_rope_theta": 10000.0,
+ "max_position_embeddings": 8192,
+ "mlp_bias": false,
+ "mlp_dropout": 0.0,
+ "model_type": "modernbert",
+ "norm_bias": false,
+ "norm_eps": 1e-05,
  "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 0,
+ "num_hidden_layers": 22,
+ "pad_token_id": 50283,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
+ "reference_compile": true,
+ "sep_token_id": 50282,
+ "sparse_pred_ignore_index": -100,
+ "sparse_prediction": false,
  "torch_dtype": "float32",
  "transformers_version": "4.48.0.dev0",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 30522
+ "vocab_size": 50368
  }
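The updated config swaps the google-bert/bert-base-uncased backbone for answerdotai/ModernBERT-base while keeping the two-class small_llm / large_llm classification head. A minimal sketch of loading the fine-tuned checkpoint for inference follows; the local checkpoint path is a placeholder, not this repository's actual id, and it assumes a transformers build that already ships the "modernbert" model type (4.48.0.dev0 is used above).

from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline

# Placeholder path: point this at a local clone or checkpoint directory of this repo.
checkpoint = "./checkpoint"

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)

# id2label in config.json maps 0 -> "small_llm", 1 -> "large_llm".
classifier = pipeline("text-classification", model=model, tokenizer=tokenizer)
print(classifier("Summarize this paragraph in one sentence."))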
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4d3528a3b74cbc73f8babcccaa6db84cfe6b8a59b11ba24a427365207d85843d
- size 437958648
+ oid sha256:446cb91f9ec552d72c46a0f13ec9e4c918b247bf6e41f02108511c590479f0c6
+ size 598439784
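The weight file grows because the backbone changes, not because of the head alone: with "torch_dtype": "float32" each parameter takes 4 bytes, so the sizes above roughly imply ~110M parameters for the old BERT-base checkpoint and ~150M for the new ModernBERT-base one. A quick sanity check (ignoring the small safetensors header overhead):

# Rough parameter-count check from the float32 safetensors sizes in this diff.
old_size, new_size = 437_958_648, 598_439_784
bytes_per_param = 4  # "torch_dtype": "float32"
print(old_size / bytes_per_param / 1e6)  # ~109.5M params (BERT-base + classifier head)
print(new_size / bytes_per_param / 1e6)  # ~149.6M params (ModernBERT-base + classifier head)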
runs/Dec25_08-20-14_ip-172-31-34-75/events.out.tfevents.1735114818.ip-172-31-34-75.12605.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67802a0f1aed3c63baf54336532da088104a2c09786392f65e2d672c8bda74a7
+ size 5820
runs/Dec25_08-23-03_ip-172-31-34-75/events.out.tfevents.1735114984.ip-172-31-34-75.14321.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae8a58533d3a4e71a91e9071dad6c8b48aec1eb510fb619a9caa43e1eb382f40
+ size 5820
runs/Dec25_08-28-22_ip-172-31-34-75/events.out.tfevents.1735115312.ip-172-31-34-75.14616.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a7587412e620d3800a4aafa3dff2fa8c860e9f20eaccfd46afd3d62850efb75
+ size 6969
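The three files added under runs/ are TensorBoard event logs written by the Trainer during the training attempts in this commit. A small sketch of reading one locally with the tensorboard package; the event-file path is copied from above, and the scalar tag names depend on what the Trainer actually logged, so they are discovered from the file rather than assumed.

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Path copied from the last run added above; adjust to a local clone of the repo.
path = "runs/Dec25_08-28-22_ip-172-31-34-75/events.out.tfevents.1735115312.ip-172-31-34-75.14616.0"

ea = EventAccumulator(path)
ea.Reload()
print(ea.Tags()["scalars"])            # list the logged scalar tags
for tag in ea.Tags()["scalars"]:
    events = ea.Scalars(tag)
    print(tag, [(e.step, e.value) for e in events[:3]])  # first few (step, value) pairs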
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a64e53f28915c2132d46140cb7854d311cdf2e60f59818255c1b166bd80b4ebe
- size 5432
+ oid sha256:215b240a700dda3533f8d4b572ab599fb9ebab1126ec87ec3389c5b3058f64d7
+ size 5368
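training_args.bin is the TrainingArguments object the Trainer serializes with torch.save, which is why it changes whenever the run configuration changes. A sketch for inspecting it locally; the attributes printed are standard TrainingArguments fields, not values taken from this file.

import torch

# Full Python object, so recent PyTorch needs weights_only=False to unpickle it.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)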