ncardus committed on
Commit
9300a24
1 Parent(s): 568d8cd

ncardus/SeqClassifier

Browse files
Files changed (2) hide show
  1. README.md +67 -67
  2. model.safetensors +1 -1
README.md CHANGED
@@ -18,11 +18,11 @@ should probably proofread and complete it, then remove this comment. -->
18
 
19
  This model was trained from scratch on an unknown dataset.
20
  It achieves the following results on the evaluation set:
21
- - Loss: 0.2105
22
- - Accuracy: 1.0
23
- - F1: 1.0
24
- - Precision: 1.0
25
- - Recall: 1.0
26
 
27
  ## Model description
28
 
@@ -51,68 +51,68 @@ The following hyperparameters were used during training:
51
 
52
  ### Training results
53
 
54
- | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | Precision | Recall |
55
- |:-------------:|:-----:|:----:|:---------------:|:--------:|:---:|:---------:|:------:|
56
- | 0.7273 | 1.67 | 5 | 0.6845 | 1.0 | 1.0 | 1.0 | 1.0 |
57
- | 0.6603 | 3.33 | 10 | 0.6026 | 1.0 | 1.0 | 1.0 | 1.0 |
58
- | 0.5979 | 5.0 | 15 | 0.5289 | 1.0 | 1.0 | 1.0 | 1.0 |
59
- | 0.545 | 6.67 | 20 | 0.4681 | 1.0 | 1.0 | 1.0 | 1.0 |
60
- | 0.5198 | 8.33 | 25 | 0.4188 | 1.0 | 1.0 | 1.0 | 1.0 |
61
- | 0.4898 | 10.0 | 30 | 0.3756 | 1.0 | 1.0 | 1.0 | 1.0 |
62
- | 0.4727 | 11.67 | 35 | 0.3397 | 1.0 | 1.0 | 1.0 | 1.0 |
63
- | 0.3947 | 13.33 | 40 | 0.3097 | 1.0 | 1.0 | 1.0 | 1.0 |
64
- | 0.4466 | 15.0 | 45 | 0.2843 | 1.0 | 1.0 | 1.0 | 1.0 |
65
- | 0.3898 | 16.67 | 50 | 0.2639 | 1.0 | 1.0 | 1.0 | 1.0 |
66
- | 0.4228 | 18.33 | 55 | 0.2479 | 1.0 | 1.0 | 1.0 | 1.0 |
67
- | 0.3761 | 20.0 | 60 | 0.2331 | 1.0 | 1.0 | 1.0 | 1.0 |
68
- | 0.3928 | 21.67 | 65 | 0.2200 | 1.0 | 1.0 | 1.0 | 1.0 |
69
- | 0.3673 | 23.33 | 70 | 0.2087 | 1.0 | 1.0 | 1.0 | 1.0 |
70
- | 0.3723 | 25.0 | 75 | 0.1984 | 1.0 | 1.0 | 1.0 | 1.0 |
71
- | 0.3893 | 26.67 | 80 | 0.1908 | 1.0 | 1.0 | 1.0 | 1.0 |
72
- | 0.358 | 28.33 | 85 | 0.1834 | 1.0 | 1.0 | 1.0 | 1.0 |
73
- | 0.3308 | 30.0 | 90 | 0.1769 | 1.0 | 1.0 | 1.0 | 1.0 |
74
- | 0.3926 | 31.67 | 95 | 0.1709 | 1.0 | 1.0 | 1.0 | 1.0 |
75
- | 0.3008 | 33.33 | 100 | 0.1654 | 1.0 | 1.0 | 1.0 | 1.0 |
76
- | 0.3742 | 35.0 | 105 | 0.1607 | 1.0 | 1.0 | 1.0 | 1.0 |
77
- | 0.3436 | 36.67 | 110 | 0.1568 | 1.0 | 1.0 | 1.0 | 1.0 |
78
- | 0.4007 | 38.33 | 115 | 0.1540 | 1.0 | 1.0 | 1.0 | 1.0 |
79
- | 0.3229 | 40.0 | 120 | 0.1514 | 1.0 | 1.0 | 1.0 | 1.0 |
80
- | 0.362 | 41.67 | 125 | 0.1489 | 1.0 | 1.0 | 1.0 | 1.0 |
81
- | 0.3211 | 43.33 | 130 | 0.1463 | 1.0 | 1.0 | 1.0 | 1.0 |
82
- | 0.3778 | 45.0 | 135 | 0.1438 | 1.0 | 1.0 | 1.0 | 1.0 |
83
- | 0.329 | 46.67 | 140 | 0.1427 | 1.0 | 1.0 | 1.0 | 1.0 |
84
- | 0.3456 | 48.33 | 145 | 0.1422 | 1.0 | 1.0 | 1.0 | 1.0 |
85
- | 0.3738 | 50.0 | 150 | 0.1412 | 1.0 | 1.0 | 1.0 | 1.0 |
86
- | 0.3352 | 51.67 | 155 | 0.1402 | 1.0 | 1.0 | 1.0 | 1.0 |
87
- | 0.3674 | 53.33 | 160 | 0.1399 | 1.0 | 1.0 | 1.0 | 1.0 |
88
- | 0.3463 | 55.0 | 165 | 0.1399 | 1.0 | 1.0 | 1.0 | 1.0 |
89
- | 0.3438 | 56.67 | 170 | 0.1406 | 1.0 | 1.0 | 1.0 | 1.0 |
90
- | 0.3135 | 58.33 | 175 | 0.1418 | 1.0 | 1.0 | 1.0 | 1.0 |
91
- | 0.391 | 60.0 | 180 | 0.1441 | 1.0 | 1.0 | 1.0 | 1.0 |
92
- | 0.3483 | 61.67 | 185 | 0.1471 | 1.0 | 1.0 | 1.0 | 1.0 |
93
- | 0.3418 | 63.33 | 190 | 0.1481 | 1.0 | 1.0 | 1.0 | 1.0 |
94
- | 0.3507 | 65.0 | 195 | 0.1483 | 1.0 | 1.0 | 1.0 | 1.0 |
95
- | 0.3579 | 66.67 | 200 | 0.1501 | 1.0 | 1.0 | 1.0 | 1.0 |
96
- | 0.344 | 68.33 | 205 | 0.1515 | 1.0 | 1.0 | 1.0 | 1.0 |
97
- | 0.3498 | 70.0 | 210 | 0.1541 | 1.0 | 1.0 | 1.0 | 1.0 |
98
- | 0.3495 | 71.67 | 215 | 0.1594 | 1.0 | 1.0 | 1.0 | 1.0 |
99
- | 0.3047 | 73.33 | 220 | 0.1633 | 1.0 | 1.0 | 1.0 | 1.0 |
100
- | 0.348 | 75.0 | 225 | 0.1678 | 1.0 | 1.0 | 1.0 | 1.0 |
101
- | 0.2986 | 76.67 | 230 | 0.1697 | 1.0 | 1.0 | 1.0 | 1.0 |
102
- | 0.4035 | 78.33 | 235 | 0.1745 | 1.0 | 1.0 | 1.0 | 1.0 |
103
- | 0.303 | 80.0 | 240 | 0.1790 | 1.0 | 1.0 | 1.0 | 1.0 |
104
- | 0.3498 | 81.67 | 245 | 0.1813 | 1.0 | 1.0 | 1.0 | 1.0 |
105
- | 0.3041 | 83.33 | 250 | 0.1819 | 1.0 | 1.0 | 1.0 | 1.0 |
106
- | 0.3521 | 85.0 | 255 | 0.1855 | 1.0 | 1.0 | 1.0 | 1.0 |
107
- | 0.3474 | 86.67 | 260 | 0.1889 | 1.0 | 1.0 | 1.0 | 1.0 |
108
- | 0.3186 | 88.33 | 265 | 0.1942 | 1.0 | 1.0 | 1.0 | 1.0 |
109
- | 0.3321 | 90.0 | 270 | 0.1978 | 1.0 | 1.0 | 1.0 | 1.0 |
110
- | 0.3395 | 91.67 | 275 | 0.2010 | 1.0 | 1.0 | 1.0 | 1.0 |
111
- | 0.3114 | 93.33 | 280 | 0.2042 | 1.0 | 1.0 | 1.0 | 1.0 |
112
- | 0.3381 | 95.0 | 285 | 0.2064 | 1.0 | 1.0 | 1.0 | 1.0 |
113
- | 0.3629 | 96.67 | 290 | 0.2085 | 1.0 | 1.0 | 1.0 | 1.0 |
114
- | 0.3224 | 98.33 | 295 | 0.2100 | 1.0 | 1.0 | 1.0 | 1.0 |
115
- | 0.2891 | 100.0 | 300 | 0.2105 | 1.0 | 1.0 | 1.0 | 1.0 |
116
 
117
 
118
  ### Framework versions
 
18
 
19
  This model was trained from scratch on an unknown dataset.
20
  It achieves the following results on the evaluation set:
21
+ - Loss: 0.5493
22
+ - Accuracy: 0.8
23
+ - F1: 0.4444
24
+ - Precision: 0.4
25
+ - Recall: 0.5
26
 
27
  ## Model description
28
 
 
51
 
52
  ### Training results
53
 
54
+ | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | Precision | Recall |
55
+ |:-------------:|:-----:|:----:|:---------------:|:--------:|:------:|:---------:|:------:|
56
+ | 0.6578 | 1.67 | 5 | 0.6407 | 0.8 | 0.4444 | 0.4 | 0.5 |
57
+ | 0.6002 | 3.33 | 10 | 0.6036 | 0.8 | 0.4444 | 0.4 | 0.5 |
58
+ | 0.5433 | 5.0 | 15 | 0.5729 | 0.8 | 0.4444 | 0.4 | 0.5 |
59
+ | 0.5122 | 6.67 | 20 | 0.5487 | 0.8 | 0.4444 | 0.4 | 0.5 |
60
+ | 0.4455 | 8.33 | 25 | 0.5309 | 0.8 | 0.4444 | 0.4 | 0.5 |
61
+ | 0.4345 | 10.0 | 30 | 0.5186 | 0.8 | 0.4444 | 0.4 | 0.5 |
62
+ | 0.3858 | 11.67 | 35 | 0.5103 | 0.8 | 0.4444 | 0.4 | 0.5 |
63
+ | 0.429 | 13.33 | 40 | 0.5054 | 0.8 | 0.4444 | 0.4 | 0.5 |
64
+ | 0.3675 | 15.0 | 45 | 0.5025 | 0.8 | 0.4444 | 0.4 | 0.5 |
65
+ | 0.3323 | 16.67 | 50 | 0.5011 | 0.8 | 0.4444 | 0.4 | 0.5 |
66
+ | 0.3623 | 18.33 | 55 | 0.5006 | 0.8 | 0.4444 | 0.4 | 0.5 |
67
+ | 0.3728 | 20.0 | 60 | 0.5007 | 0.8 | 0.4444 | 0.4 | 0.5 |
68
+ | 0.3368 | 21.67 | 65 | 0.5015 | 0.8 | 0.4444 | 0.4 | 0.5 |
69
+ | 0.3226 | 23.33 | 70 | 0.5029 | 0.8 | 0.4444 | 0.4 | 0.5 |
70
+ | 0.3412 | 25.0 | 75 | 0.5047 | 0.8 | 0.4444 | 0.4 | 0.5 |
71
+ | 0.32 | 26.67 | 80 | 0.5066 | 0.8 | 0.4444 | 0.4 | 0.5 |
72
+ | 0.3098 | 28.33 | 85 | 0.5087 | 0.8 | 0.4444 | 0.4 | 0.5 |
73
+ | 0.3319 | 30.0 | 90 | 0.5110 | 0.8 | 0.4444 | 0.4 | 0.5 |
74
+ | 0.3404 | 31.67 | 95 | 0.5133 | 0.8 | 0.4444 | 0.4 | 0.5 |
75
+ | 0.2713 | 33.33 | 100 | 0.5159 | 0.8 | 0.4444 | 0.4 | 0.5 |
76
+ | 0.3269 | 35.0 | 105 | 0.5183 | 0.8 | 0.4444 | 0.4 | 0.5 |
77
+ | 0.3384 | 36.67 | 110 | 0.5202 | 0.8 | 0.4444 | 0.4 | 0.5 |
78
+ | 0.2879 | 38.33 | 115 | 0.5221 | 0.8 | 0.4444 | 0.4 | 0.5 |
79
+ | 0.2917 | 40.0 | 120 | 0.5241 | 0.8 | 0.4444 | 0.4 | 0.5 |
80
+ | 0.2662 | 41.67 | 125 | 0.5262 | 0.8 | 0.4444 | 0.4 | 0.5 |
81
+ | 0.3248 | 43.33 | 130 | 0.5280 | 0.8 | 0.4444 | 0.4 | 0.5 |
82
+ | 0.3407 | 45.0 | 135 | 0.5295 | 0.8 | 0.4444 | 0.4 | 0.5 |
83
+ | 0.2767 | 46.67 | 140 | 0.5311 | 0.8 | 0.4444 | 0.4 | 0.5 |
84
+ | 0.3517 | 48.33 | 145 | 0.5328 | 0.8 | 0.4444 | 0.4 | 0.5 |
85
+ | 0.2873 | 50.0 | 150 | 0.5342 | 0.8 | 0.4444 | 0.4 | 0.5 |
86
+ | 0.2713 | 51.67 | 155 | 0.5357 | 0.8 | 0.4444 | 0.4 | 0.5 |
87
+ | 0.3552 | 53.33 | 160 | 0.5365 | 0.8 | 0.4444 | 0.4 | 0.5 |
88
+ | 0.3268 | 55.0 | 165 | 0.5373 | 0.8 | 0.4444 | 0.4 | 0.5 |
89
+ | 0.3338 | 56.67 | 170 | 0.5380 | 0.8 | 0.4444 | 0.4 | 0.5 |
90
+ | 0.2583 | 58.33 | 175 | 0.5388 | 0.8 | 0.4444 | 0.4 | 0.5 |
91
+ | 0.3225 | 60.0 | 180 | 0.5394 | 0.8 | 0.4444 | 0.4 | 0.5 |
92
+ | 0.3264 | 61.67 | 185 | 0.5402 | 0.8 | 0.4444 | 0.4 | 0.5 |
93
+ | 0.2748 | 63.33 | 190 | 0.5411 | 0.8 | 0.4444 | 0.4 | 0.5 |
94
+ | 0.2917 | 65.0 | 195 | 0.5422 | 0.8 | 0.4444 | 0.4 | 0.5 |
95
+ | 0.3245 | 66.67 | 200 | 0.5429 | 0.8 | 0.4444 | 0.4 | 0.5 |
96
+ | 0.2481 | 68.33 | 205 | 0.5437 | 0.8 | 0.4444 | 0.4 | 0.5 |
97
+ | 0.3228 | 70.0 | 210 | 0.5444 | 0.8 | 0.4444 | 0.4 | 0.5 |
98
+ | 0.2481 | 71.67 | 215 | 0.5451 | 0.8 | 0.4444 | 0.4 | 0.5 |
99
+ | 0.3224 | 73.33 | 220 | 0.5456 | 0.8 | 0.4444 | 0.4 | 0.5 |
100
+ | 0.3423 | 75.0 | 225 | 0.5461 | 0.8 | 0.4444 | 0.4 | 0.5 |
101
+ | 0.3339 | 76.67 | 230 | 0.5463 | 0.8 | 0.4444 | 0.4 | 0.5 |
102
+ | 0.2978 | 78.33 | 235 | 0.5466 | 0.8 | 0.4444 | 0.4 | 0.5 |
103
+ | 0.2408 | 80.0 | 240 | 0.5470 | 0.8 | 0.4444 | 0.4 | 0.5 |
104
+ | 0.3282 | 81.67 | 245 | 0.5473 | 0.8 | 0.4444 | 0.4 | 0.5 |
105
+ | 0.2829 | 83.33 | 250 | 0.5478 | 0.8 | 0.4444 | 0.4 | 0.5 |
106
+ | 0.2991 | 85.0 | 255 | 0.5482 | 0.8 | 0.4444 | 0.4 | 0.5 |
107
+ | 0.3031 | 86.67 | 260 | 0.5484 | 0.8 | 0.4444 | 0.4 | 0.5 |
108
+ | 0.3316 | 88.33 | 265 | 0.5485 | 0.8 | 0.4444 | 0.4 | 0.5 |
109
+ | 0.2443 | 90.0 | 270 | 0.5487 | 0.8 | 0.4444 | 0.4 | 0.5 |
110
+ | 0.2861 | 91.67 | 275 | 0.5490 | 0.8 | 0.4444 | 0.4 | 0.5 |
111
+ | 0.2941 | 93.33 | 280 | 0.5491 | 0.8 | 0.4444 | 0.4 | 0.5 |
112
+ | 0.3573 | 95.0 | 285 | 0.5492 | 0.8 | 0.4444 | 0.4 | 0.5 |
113
+ | 0.3153 | 96.67 | 290 | 0.5493 | 0.8 | 0.4444 | 0.4 | 0.5 |
114
+ | 0.3079 | 98.33 | 295 | 0.5493 | 0.8 | 0.4444 | 0.4 | 0.5 |
115
+ | 0.2846 | 100.0 | 300 | 0.5493 | 0.8 | 0.4444 | 0.4 | 0.5 |
116
 
117
 
118
  ### Framework versions
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ef2f38cd52539a301c72abcfc6b2c30980ee110baecd9170ab29b1b49822d434
3
  size 11033176
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:22fe0f198b23d5a043b21a11d2b3740a49227a5e7408c54d36f6a1ebe31f73e8
3
  size 11033176