jason9693 committed on
Commit
c9fd8e2
1 Parent(s): c1f6ee1
Files changed (2)
  1. config.json +4 -4
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "_name_or_path": "jason9693/soongsil-bert-base",
-  "accuracy": 0.8026525198938992,
+  "accuracy": 0.8320954907161804,
   "architectures": [
     "RobertaForSequenceClassification"
   ],
@@ -8,7 +8,7 @@
   "bos_token_id": 0,
   "classifier_dropout": null,
   "eos_token_id": 2,
-  "f1": 0.8268156424581007,
+  "f1": 0.8526879218059111,
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
@@ -30,9 +30,9 @@
   "num_hidden_layers": 12,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
-  "precision": 0.7481044650379107,
+  "precision": 0.7713684210526316,
   "problem_type": "single_label_classification",
-  "recall": 0.9240374609781478,
+  "recall": 0.9531737773152965,
   "torch_dtype": "float32",
   "total_flos": 8.217624121867606e+19,
   "transformers_version": "4.18.0",
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f7631a66653f66d4c81605f68222dbaafcf20bca94ef011632d5a528d03ec8ed
+oid sha256:e577fd50130c71f213e78dc24ef65b20573740e5985ebf72864cc0a846a7a3a2
 size 393398253
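The pytorch_model.bin entry is a Git LFS pointer, so only the object's sha256 oid changed (the size stayed the same). A downloaded copy of the weights can be checked against the new pointer roughly as sketched below; the local file path is an assumption, not part of the commit.

# Sketch: verify a local pytorch_model.bin against the sha256 oid in the new LFS pointer.
import hashlib

path = "pytorch_model.bin"  # assumed local path to the downloaded weights
expected = "e577fd50130c71f213e78dc24ef65b20573740e5985ebf72864cc0a846a7a3a2"

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest() == expected)  # True if the file matches this commit's LFS object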