hamedkhaledi committed
Commit d5e53ea
1 Parent(s): e5868c3

Update model

Files changed (3)
  1. loss.tsv +6 -26
  2. pytorch_model.bin +2 -2
  3. training.log +146 -417
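
The updated weights in pytorch_model.bin are a Flair SequenceTagger checkpoint (see the training.log diff below). As a quick orientation, a minimal inference sketch, assuming Flair is installed; "hamedkhaledi/<repo-name>" is a placeholder, not the confirmed repository id, and a local clone path works as well:

    # Hedged sketch: repository id below is a placeholder.
    from flair.data import Sentence
    from flair.models import SequenceTagger

    tagger = SequenceTagger.load("hamedkhaledi/<repo-name>")  # or a local path to the cloned repo
    sentence = Sentence("این یک جمله آزمایشی است .")
    tagger.predict(sentence)
    print(sentence.to_tagged_string())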
loss.tsv CHANGED
@@ -1,26 +1,6 @@
1
- EPOCH TIMESTAMP BAD_EPOCHS LEARNING_RATE TRAIN_LOSS
2
- 1 02:11:17 0 0.1000 0.29944513383106386
3
- 2 02:13:08 0 0.1000 0.19280993853194325
4
- 3 02:14:59 0 0.1000 0.16545134829518973
5
- 4 02:16:48 0 0.1000 0.14994093501787764
6
- 5 02:18:39 0 0.1000 0.14055903154017835
7
- 6 02:20:30 0 0.1000 0.13162334187794475
8
- 7 02:22:22 0 0.1000 0.1255392909416602
9
- 8 02:24:13 0 0.1000 0.12040610092126014
10
- 9 02:26:05 0 0.1000 0.11533507664751852
11
- 10 02:27:57 0 0.1000 0.11078567317464759
12
- 11 02:29:48 0 0.1000 0.10792523206622268
13
- 12 02:31:37 0 0.1000 0.1049093249554238
14
- 13 02:33:28 0 0.1000 0.10211521327629115
15
- 14 02:35:25 0 0.1000 0.09980944747815595
16
- 15 02:37:14 0 0.1000 0.09761941123208488
17
- 16 02:39:06 0 0.1000 0.0953953896611401
18
- 17 02:40:58 0 0.1000 0.09268245582877983
19
- 18 02:42:50 0 0.1000 0.08999807655818322
20
- 19 02:44:41 0 0.1000 0.08849618166154223
21
- 20 02:46:32 0 0.1000 0.08828770131217435
22
- 21 02:48:21 0 0.1000 0.08494609398739676
23
- 22 02:50:11 0 0.1000 0.08349820143082182
24
- 23 02:52:02 0 0.1000 0.08202487858966351
25
- 24 02:53:52 0 0.1000 0.08095665230448137
26
- 25 02:55:42 0 0.1000 0.07815882924953178
 
1
+ EPOCH TIMESTAMP BAD_EPOCHS LEARNING_RATE TRAIN_LOSS DEV_LOSS DEV_PRECISION DEV_RECALL DEV_F1 DEV_ACCURACY
2
+ 1 16:21:08 0 0.1000 0.3021370332429696 0.1289350390434265 0.9601 0.9601 0.9601 0.9601
3
+ 2 16:43:34 0 0.1000 0.19530593042842243 0.10149012506008148 0.9708 0.9708 0.9708 0.9708
4
+ 3 17:05:58 0 0.1000 0.16937352967357722 0.09684865176677704 0.9731 0.9731 0.9731 0.9731
5
+ 4 17:28:23 0 0.1000 0.15777116446677278 0.09011354297399521 0.9744 0.9744 0.9744 0.9744
6
+ 5 17:50:33 0 0.1000 0.14917361768721923 0.08973350375890732 0.9746 0.9746 0.9746 0.9746
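
The new loss.tsv also tracks per-epoch dev metrics (DEV_LOSS through DEV_ACCURACY), since this run holds out a dev set (train_with_dev: "False" in the log below) instead of training on it. A small sketch for inspecting the file, assuming it has been downloaded locally and is tab-separated as Flair normally writes it:

    import pandas as pd

    # Hedged sketch: expects the updated loss.tsv next to this script.
    history = pd.read_csv("loss.tsv", sep="\t")
    print(history[["EPOCH", "TRAIN_LOSS", "DEV_LOSS", "DEV_F1"]])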
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d61cc8ee1b5685e4ec1ac4d69a813a47a878cf3105bf43d2c43e03dff3d71865
3
- size 378844161
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:155e918dac8cac5257ea9bee3655c25d69c1f296303e8933004b62a255e1b94b
3
+ size 415543931
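
The pointer above only records the LFS object's SHA-256 and size; the actual weights are fetched separately. A sketch for checking a downloaded copy against the new pointer, assuming the real binary (not the pointer file) is present locally:

    import hashlib
    import os

    path = "pytorch_model.bin"
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)

    # Expected values taken from the updated LFS pointer above.
    print(digest.hexdigest() == "155e918dac8cac5257ea9bee3655c25d69c1f296303e8933004b62a255e1b94b")
    print(os.path.getsize(path) == 415543931)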
training.log CHANGED
@@ -1,10 +1,7 @@
1
- 2022-08-07 02:05:29,506 ----------------------------------------------------------------------------------------------------
2
- 2022-08-07 02:05:29,507 Model: "SequenceTagger(
3
  (embeddings): StackedEmbeddings(
4
- (list_embedding_0): WordEmbeddings(
5
- 'fa'
6
- (embedding): Embedding(56850, 300)
7
- )
8
  (list_embedding_1): FlairEmbeddings(
9
  (lm): LanguageModel(
10
  (drop): Dropout(p=0.1, inplace=False)
@@ -24,423 +21,155 @@
24
  )
25
  (word_dropout): WordDropout(p=0.05)
26
  (locked_dropout): LockedDropout(p=0.5)
27
- (rnn): LSTM(4396, 512, batch_first=True, bidirectional=True)
28
- (linear): Linear(in_features=1024, out_features=31, bias=True)
29
- (loss_function): CrossEntropyLoss()
30
  )"
31
- 2022-08-07 02:05:29,507 ----------------------------------------------------------------------------------------------------
32
- 2022-08-07 02:05:29,507 Corpus: "Corpus: 24000 train + 3000 dev + 3000 test sentences"
33
- 2022-08-07 02:05:29,507 ----------------------------------------------------------------------------------------------------
34
- 2022-08-07 02:05:29,507 Parameters:
35
- 2022-08-07 02:05:29,507 - learning_rate: "0.100000"
36
- 2022-08-07 02:05:29,507 - mini_batch_size: "8"
37
- 2022-08-07 02:05:29,507 - patience: "3"
38
- 2022-08-07 02:05:29,507 - anneal_factor: "0.5"
39
- 2022-08-07 02:05:29,507 - max_epochs: "25"
40
- 2022-08-07 02:05:29,507 - shuffle: "True"
41
- 2022-08-07 02:05:29,508 - train_with_dev: "True"
42
- 2022-08-07 02:05:29,508 - batch_growth_annealing: "False"
43
- 2022-08-07 02:05:29,508 ----------------------------------------------------------------------------------------------------
44
- 2022-08-07 02:05:29,508 Model training base path: "data/pos-Uppsala/model2"
45
- 2022-08-07 02:05:29,508 ----------------------------------------------------------------------------------------------------
46
- 2022-08-07 02:05:29,508 Device: cuda:0
47
- 2022-08-07 02:05:29,508 ----------------------------------------------------------------------------------------------------
48
- 2022-08-07 02:05:29,508 Embeddings storage mode: cpu
49
- 2022-08-07 02:05:29,508 ----------------------------------------------------------------------------------------------------
50
- 2022-08-07 02:05:55,999 epoch 1 - iter 337/3375 - loss 0.75739218 - samples/sec: 101.85 - lr: 0.100000
51
- 2022-08-07 02:06:37,080 epoch 1 - iter 674/3375 - loss 0.54143209 - samples/sec: 65.66 - lr: 0.100000
52
- 2022-08-07 02:07:09,607 epoch 1 - iter 1011/3375 - loss 0.46568471 - samples/sec: 82.95 - lr: 0.100000
53
- 2022-08-07 02:07:45,920 epoch 1 - iter 1348/3375 - loss 0.41181559 - samples/sec: 74.30 - lr: 0.100000
54
- 2022-08-07 02:08:21,278 epoch 1 - iter 1685/3375 - loss 0.37828799 - samples/sec: 76.30 - lr: 0.100000
55
- 2022-08-07 02:08:54,851 epoch 1 - iter 2022/3375 - loss 0.35283483 - samples/sec: 80.36 - lr: 0.100000
56
- 2022-08-07 02:09:32,124 epoch 1 - iter 2359/3375 - loss 0.33395991 - samples/sec: 72.38 - lr: 0.100000
57
- 2022-08-07 02:10:05,700 epoch 1 - iter 2696/3375 - loss 0.32651650 - samples/sec: 80.35 - lr: 0.100000
58
- 2022-08-07 02:10:40,669 epoch 1 - iter 3033/3375 - loss 0.31224939 - samples/sec: 77.15 - lr: 0.100000
59
- 2022-08-07 02:11:16,615 epoch 1 - iter 3370/3375 - loss 0.29974125 - samples/sec: 75.05 - lr: 0.100000
60
- 2022-08-07 02:11:17,267 ----------------------------------------------------------------------------------------------------
61
- 2022-08-07 02:11:17,267 EPOCH 1 done: loss 0.2994 - lr 0.100000
62
- 2022-08-07 02:11:17,267 BAD EPOCHS (no improvement): 0
63
- 2022-08-07 02:11:17,267 ----------------------------------------------------------------------------------------------------
64
- 2022-08-07 02:11:28,327 epoch 2 - iter 337/3375 - loss 0.21007321 - samples/sec: 244.45 - lr: 0.100000
65
- 2022-08-07 02:11:39,198 epoch 2 - iter 674/3375 - loss 0.20500491 - samples/sec: 248.66 - lr: 0.100000
66
- 2022-08-07 02:11:50,425 epoch 2 - iter 1011/3375 - loss 0.20492344 - samples/sec: 240.79 - lr: 0.100000
67
- 2022-08-07 02:12:01,420 epoch 2 - iter 1348/3375 - loss 0.20363866 - samples/sec: 245.85 - lr: 0.100000
68
- 2022-08-07 02:12:12,667 epoch 2 - iter 1685/3375 - loss 0.20392884 - samples/sec: 240.35 - lr: 0.100000
69
- 2022-08-07 02:12:23,586 epoch 2 - iter 2022/3375 - loss 0.20044378 - samples/sec: 247.55 - lr: 0.100000
70
- 2022-08-07 02:12:34,574 epoch 2 - iter 2359/3375 - loss 0.19761727 - samples/sec: 246.01 - lr: 0.100000
71
- 2022-08-07 02:12:45,723 epoch 2 - iter 2696/3375 - loss 0.19607425 - samples/sec: 242.47 - lr: 0.100000
72
- 2022-08-07 02:12:56,882 epoch 2 - iter 3033/3375 - loss 0.19437688 - samples/sec: 242.22 - lr: 0.100000
73
- 2022-08-07 02:13:07,970 epoch 2 - iter 3370/3375 - loss 0.19287658 - samples/sec: 243.74 - lr: 0.100000
74
- 2022-08-07 02:13:08,125 ----------------------------------------------------------------------------------------------------
75
- 2022-08-07 02:13:08,125 EPOCH 2 done: loss 0.1928 - lr 0.100000
76
- 2022-08-07 02:13:08,125 BAD EPOCHS (no improvement): 0
77
- 2022-08-07 02:13:08,125 ----------------------------------------------------------------------------------------------------
78
- 2022-08-07 02:13:18,732 epoch 3 - iter 337/3375 - loss 0.16840639 - samples/sec: 254.79 - lr: 0.100000
79
- 2022-08-07 02:13:29,949 epoch 3 - iter 674/3375 - loss 0.17134103 - samples/sec: 240.99 - lr: 0.100000
80
- 2022-08-07 02:13:41,422 epoch 3 - iter 1011/3375 - loss 0.17177587 - samples/sec: 235.63 - lr: 0.100000
81
- 2022-08-07 02:13:52,505 epoch 3 - iter 1348/3375 - loss 0.17139864 - samples/sec: 243.87 - lr: 0.100000
82
- 2022-08-07 02:14:03,658 epoch 3 - iter 1685/3375 - loss 0.16962796 - samples/sec: 242.33 - lr: 0.100000
83
- 2022-08-07 02:14:14,742 epoch 3 - iter 2022/3375 - loss 0.16969660 - samples/sec: 243.85 - lr: 0.100000
84
- 2022-08-07 02:14:26,081 epoch 3 - iter 2359/3375 - loss 0.16840264 - samples/sec: 238.43 - lr: 0.100000
85
- 2022-08-07 02:14:37,186 epoch 3 - iter 2696/3375 - loss 0.16736992 - samples/sec: 243.39 - lr: 0.100000
86
- 2022-08-07 02:14:48,028 epoch 3 - iter 3033/3375 - loss 0.16650554 - samples/sec: 249.32 - lr: 0.100000
87
- 2022-08-07 02:14:58,860 epoch 3 - iter 3370/3375 - loss 0.16548808 - samples/sec: 249.55 - lr: 0.100000
88
- 2022-08-07 02:14:59,028 ----------------------------------------------------------------------------------------------------
89
- 2022-08-07 02:14:59,028 EPOCH 3 done: loss 0.1655 - lr 0.100000
90
- 2022-08-07 02:14:59,028 BAD EPOCHS (no improvement): 0
91
- 2022-08-07 02:14:59,029 ----------------------------------------------------------------------------------------------------
92
- 2022-08-07 02:15:10,190 epoch 4 - iter 337/3375 - loss 0.16471399 - samples/sec: 242.21 - lr: 0.100000
93
- 2022-08-07 02:15:21,371 epoch 4 - iter 674/3375 - loss 0.16008298 - samples/sec: 241.76 - lr: 0.100000
94
- 2022-08-07 02:15:32,514 epoch 4 - iter 1011/3375 - loss 0.15720752 - samples/sec: 242.57 - lr: 0.100000
95
- 2022-08-07 02:15:43,108 epoch 4 - iter 1348/3375 - loss 0.15610167 - samples/sec: 255.09 - lr: 0.100000
96
- 2022-08-07 02:15:53,702 epoch 4 - iter 1685/3375 - loss 0.15385958 - samples/sec: 255.15 - lr: 0.100000
97
- 2022-08-07 02:16:04,903 epoch 4 - iter 2022/3375 - loss 0.15358797 - samples/sec: 241.33 - lr: 0.100000
98
- 2022-08-07 02:16:16,037 epoch 4 - iter 2359/3375 - loss 0.15218143 - samples/sec: 242.77 - lr: 0.100000
99
- 2022-08-07 02:16:26,713 epoch 4 - iter 2696/3375 - loss 0.15094360 - samples/sec: 253.18 - lr: 0.100000
100
- 2022-08-07 02:16:37,442 epoch 4 - iter 3033/3375 - loss 0.15045144 - samples/sec: 251.90 - lr: 0.100000
101
- 2022-08-07 02:16:48,787 epoch 4 - iter 3370/3375 - loss 0.14985109 - samples/sec: 238.27 - lr: 0.100000
102
- 2022-08-07 02:16:48,934 ----------------------------------------------------------------------------------------------------
103
- 2022-08-07 02:16:48,934 EPOCH 4 done: loss 0.1499 - lr 0.100000
104
- 2022-08-07 02:16:48,934 BAD EPOCHS (no improvement): 0
105
- 2022-08-07 02:16:48,935 ----------------------------------------------------------------------------------------------------
106
- 2022-08-07 02:16:59,994 epoch 5 - iter 337/3375 - loss 0.13777412 - samples/sec: 244.45 - lr: 0.100000
107
- 2022-08-07 02:17:11,035 epoch 5 - iter 674/3375 - loss 0.14094211 - samples/sec: 244.80 - lr: 0.100000
108
- 2022-08-07 02:17:21,594 epoch 5 - iter 1011/3375 - loss 0.14015121 - samples/sec: 255.97 - lr: 0.100000
109
- 2022-08-07 02:17:32,943 epoch 5 - iter 1348/3375 - loss 0.14302843 - samples/sec: 238.17 - lr: 0.100000
110
- 2022-08-07 02:17:44,086 epoch 5 - iter 1685/3375 - loss 0.14201900 - samples/sec: 242.58 - lr: 0.100000
111
- 2022-08-07 02:17:55,115 epoch 5 - iter 2022/3375 - loss 0.14188630 - samples/sec: 245.10 - lr: 0.100000
112
- 2022-08-07 02:18:06,000 epoch 5 - iter 2359/3375 - loss 0.14062047 - samples/sec: 248.40 - lr: 0.100000
113
- 2022-08-07 02:18:17,153 epoch 5 - iter 2696/3375 - loss 0.14139705 - samples/sec: 242.40 - lr: 0.100000
114
- 2022-08-07 02:18:28,428 epoch 5 - iter 3033/3375 - loss 0.14102027 - samples/sec: 239.77 - lr: 0.100000
115
- 2022-08-07 02:18:39,576 epoch 5 - iter 3370/3375 - loss 0.14054028 - samples/sec: 242.49 - lr: 0.100000
116
- 2022-08-07 02:18:39,730 ----------------------------------------------------------------------------------------------------
117
- 2022-08-07 02:18:39,730 EPOCH 5 done: loss 0.1406 - lr 0.100000
118
- 2022-08-07 02:18:39,731 BAD EPOCHS (no improvement): 0
119
- 2022-08-07 02:18:39,731 ----------------------------------------------------------------------------------------------------
120
- 2022-08-07 02:18:50,258 epoch 6 - iter 337/3375 - loss 0.13602802 - samples/sec: 256.82 - lr: 0.100000
121
- 2022-08-07 02:19:01,074 epoch 6 - iter 674/3375 - loss 0.13337600 - samples/sec: 249.87 - lr: 0.100000
122
- 2022-08-07 02:19:11,917 epoch 6 - iter 1011/3375 - loss 0.13048398 - samples/sec: 249.28 - lr: 0.100000
123
- 2022-08-07 02:19:23,313 epoch 6 - iter 1348/3375 - loss 0.12983568 - samples/sec: 237.22 - lr: 0.100000
124
- 2022-08-07 02:19:34,736 epoch 6 - iter 1685/3375 - loss 0.13111942 - samples/sec: 236.69 - lr: 0.100000
125
- 2022-08-07 02:19:45,806 epoch 6 - iter 2022/3375 - loss 0.13110468 - samples/sec: 244.19 - lr: 0.100000
126
- 2022-08-07 02:19:57,573 epoch 6 - iter 2359/3375 - loss 0.13121469 - samples/sec: 229.74 - lr: 0.100000
127
- 2022-08-07 02:20:08,293 epoch 6 - iter 2696/3375 - loss 0.13079228 - samples/sec: 252.22 - lr: 0.100000
128
- 2022-08-07 02:20:19,667 epoch 6 - iter 3033/3375 - loss 0.13218291 - samples/sec: 237.66 - lr: 0.100000
129
- 2022-08-07 02:20:30,717 epoch 6 - iter 3370/3375 - loss 0.13161722 - samples/sec: 244.63 - lr: 0.100000
130
- 2022-08-07 02:20:30,883 ----------------------------------------------------------------------------------------------------
131
- 2022-08-07 02:20:30,883 EPOCH 6 done: loss 0.1316 - lr 0.100000
132
- 2022-08-07 02:20:30,884 BAD EPOCHS (no improvement): 0
133
- 2022-08-07 02:20:30,884 ----------------------------------------------------------------------------------------------------
134
- 2022-08-07 02:20:41,904 epoch 7 - iter 337/3375 - loss 0.12469525 - samples/sec: 245.31 - lr: 0.100000
135
- 2022-08-07 02:20:53,093 epoch 7 - iter 674/3375 - loss 0.12698511 - samples/sec: 241.58 - lr: 0.100000
136
- 2022-08-07 02:21:04,011 epoch 7 - iter 1011/3375 - loss 0.12657046 - samples/sec: 247.61 - lr: 0.100000
137
- 2022-08-07 02:21:15,101 epoch 7 - iter 1348/3375 - loss 0.12672328 - samples/sec: 243.75 - lr: 0.100000
138
- 2022-08-07 02:21:26,075 epoch 7 - iter 1685/3375 - loss 0.12594031 - samples/sec: 246.31 - lr: 0.100000
139
- 2022-08-07 02:21:37,003 epoch 7 - iter 2022/3375 - loss 0.12642873 - samples/sec: 247.30 - lr: 0.100000
140
- 2022-08-07 02:21:48,160 epoch 7 - iter 2359/3375 - loss 0.12583060 - samples/sec: 242.39 - lr: 0.100000
141
- 2022-08-07 02:21:59,410 epoch 7 - iter 2696/3375 - loss 0.12552168 - samples/sec: 240.33 - lr: 0.100000
142
- 2022-08-07 02:22:10,826 epoch 7 - iter 3033/3375 - loss 0.12531533 - samples/sec: 236.80 - lr: 0.100000
143
- 2022-08-07 02:22:22,230 epoch 7 - iter 3370/3375 - loss 0.12554230 - samples/sec: 237.02 - lr: 0.100000
144
- 2022-08-07 02:22:22,372 ----------------------------------------------------------------------------------------------------
145
- 2022-08-07 02:22:22,372 EPOCH 7 done: loss 0.1255 - lr 0.100000
146
- 2022-08-07 02:22:22,372 BAD EPOCHS (no improvement): 0
147
- 2022-08-07 02:22:22,372 ----------------------------------------------------------------------------------------------------
148
- 2022-08-07 02:22:33,973 epoch 8 - iter 337/3375 - loss 0.11902872 - samples/sec: 233.03 - lr: 0.100000
149
- 2022-08-07 02:22:45,079 epoch 8 - iter 674/3375 - loss 0.11995484 - samples/sec: 243.37 - lr: 0.100000
150
- 2022-08-07 02:22:56,079 epoch 8 - iter 1011/3375 - loss 0.12447185 - samples/sec: 245.73 - lr: 0.100000
151
- 2022-08-07 02:23:07,005 epoch 8 - iter 1348/3375 - loss 0.12186016 - samples/sec: 247.36 - lr: 0.100000
152
- 2022-08-07 02:23:18,142 epoch 8 - iter 1685/3375 - loss 0.12180914 - samples/sec: 242.72 - lr: 0.100000
153
- 2022-08-07 02:23:28,937 epoch 8 - iter 2022/3375 - loss 0.12178735 - samples/sec: 250.35 - lr: 0.100000
154
- 2022-08-07 02:23:39,666 epoch 8 - iter 2359/3375 - loss 0.12100308 - samples/sec: 251.94 - lr: 0.100000
155
- 2022-08-07 02:23:50,288 epoch 8 - iter 2696/3375 - loss 0.12098102 - samples/sec: 254.43 - lr: 0.100000
156
- 2022-08-07 02:24:01,466 epoch 8 - iter 3033/3375 - loss 0.12091111 - samples/sec: 241.86 - lr: 0.100000
157
- 2022-08-07 02:24:12,938 epoch 8 - iter 3370/3375 - loss 0.12040225 - samples/sec: 235.63 - lr: 0.100000
158
- 2022-08-07 02:24:13,123 ----------------------------------------------------------------------------------------------------
159
- 2022-08-07 02:24:13,123 EPOCH 8 done: loss 0.1204 - lr 0.100000
160
- 2022-08-07 02:24:13,123 BAD EPOCHS (no improvement): 0
161
- 2022-08-07 02:24:13,124 ----------------------------------------------------------------------------------------------------
162
- 2022-08-07 02:24:24,336 epoch 9 - iter 337/3375 - loss 0.11290030 - samples/sec: 241.17 - lr: 0.100000
163
- 2022-08-07 02:24:35,714 epoch 9 - iter 674/3375 - loss 0.11346945 - samples/sec: 237.60 - lr: 0.100000
164
- 2022-08-07 02:24:47,141 epoch 9 - iter 1011/3375 - loss 0.11401393 - samples/sec: 236.57 - lr: 0.100000
165
- 2022-08-07 02:24:58,300 epoch 9 - iter 1348/3375 - loss 0.11366582 - samples/sec: 242.28 - lr: 0.100000
166
- 2022-08-07 02:25:09,198 epoch 9 - iter 1685/3375 - loss 0.11338815 - samples/sec: 248.01 - lr: 0.100000
167
- 2022-08-07 02:25:20,242 epoch 9 - iter 2022/3375 - loss 0.11280167 - samples/sec: 244.76 - lr: 0.100000
168
- 2022-08-07 02:25:31,000 epoch 9 - iter 2359/3375 - loss 0.11258570 - samples/sec: 251.27 - lr: 0.100000
169
- 2022-08-07 02:25:42,675 epoch 9 - iter 2696/3375 - loss 0.11476017 - samples/sec: 231.53 - lr: 0.100000
170
- 2022-08-07 02:25:54,127 epoch 9 - iter 3033/3375 - loss 0.11505920 - samples/sec: 236.04 - lr: 0.100000
171
- 2022-08-07 02:26:05,370 epoch 9 - iter 3370/3375 - loss 0.11528098 - samples/sec: 240.42 - lr: 0.100000
172
- 2022-08-07 02:26:05,525 ----------------------------------------------------------------------------------------------------
173
- 2022-08-07 02:26:05,525 EPOCH 9 done: loss 0.1153 - lr 0.100000
174
- 2022-08-07 02:26:05,525 BAD EPOCHS (no improvement): 0
175
- 2022-08-07 02:26:05,526 ----------------------------------------------------------------------------------------------------
176
- 2022-08-07 02:26:16,488 epoch 10 - iter 337/3375 - loss 0.10947414 - samples/sec: 246.62 - lr: 0.100000
177
- 2022-08-07 02:26:27,490 epoch 10 - iter 674/3375 - loss 0.11130776 - samples/sec: 245.64 - lr: 0.100000
178
- 2022-08-07 02:26:38,267 epoch 10 - iter 1011/3375 - loss 0.10972401 - samples/sec: 250.80 - lr: 0.100000
179
- 2022-08-07 02:26:49,354 epoch 10 - iter 1348/3375 - loss 0.10872413 - samples/sec: 243.79 - lr: 0.100000
180
- 2022-08-07 02:27:00,642 epoch 10 - iter 1685/3375 - loss 0.10961811 - samples/sec: 239.51 - lr: 0.100000
181
- 2022-08-07 02:27:11,930 epoch 10 - iter 2022/3375 - loss 0.11048220 - samples/sec: 239.46 - lr: 0.100000
182
- 2022-08-07 02:27:23,290 epoch 10 - iter 2359/3375 - loss 0.11060959 - samples/sec: 237.93 - lr: 0.100000
183
- 2022-08-07 02:27:34,821 epoch 10 - iter 2696/3375 - loss 0.11126846 - samples/sec: 234.45 - lr: 0.100000
184
- 2022-08-07 02:27:45,925 epoch 10 - iter 3033/3375 - loss 0.11105431 - samples/sec: 243.40 - lr: 0.100000
185
- 2022-08-07 02:27:57,172 epoch 10 - iter 3370/3375 - loss 0.11075921 - samples/sec: 240.37 - lr: 0.100000
186
- 2022-08-07 02:27:57,325 ----------------------------------------------------------------------------------------------------
187
- 2022-08-07 02:27:57,326 EPOCH 10 done: loss 0.1108 - lr 0.100000
188
- 2022-08-07 02:27:57,326 BAD EPOCHS (no improvement): 0
189
- 2022-08-07 02:27:57,326 ----------------------------------------------------------------------------------------------------
190
- 2022-08-07 02:28:08,659 epoch 11 - iter 337/3375 - loss 0.11076857 - samples/sec: 238.59 - lr: 0.100000
191
- 2022-08-07 02:28:19,669 epoch 11 - iter 674/3375 - loss 0.10651420 - samples/sec: 245.50 - lr: 0.100000
192
- 2022-08-07 02:28:30,995 epoch 11 - iter 1011/3375 - loss 0.10568094 - samples/sec: 238.65 - lr: 0.100000
193
- 2022-08-07 02:28:42,325 epoch 11 - iter 1348/3375 - loss 0.10905778 - samples/sec: 238.58 - lr: 0.100000
194
- 2022-08-07 02:28:53,100 epoch 11 - iter 1685/3375 - loss 0.10889745 - samples/sec: 250.81 - lr: 0.100000
195
- 2022-08-07 02:29:04,246 epoch 11 - iter 2022/3375 - loss 0.10882025 - samples/sec: 242.52 - lr: 0.100000
196
- 2022-08-07 02:29:15,152 epoch 11 - iter 2359/3375 - loss 0.10841426 - samples/sec: 247.86 - lr: 0.100000
197
- 2022-08-07 02:29:25,976 epoch 11 - iter 2696/3375 - loss 0.10880072 - samples/sec: 249.68 - lr: 0.100000
198
- 2022-08-07 02:29:37,366 epoch 11 - iter 3033/3375 - loss 0.10817257 - samples/sec: 237.30 - lr: 0.100000
199
- 2022-08-07 02:29:48,295 epoch 11 - iter 3370/3375 - loss 0.10790525 - samples/sec: 247.35 - lr: 0.100000
200
- 2022-08-07 02:29:48,441 ----------------------------------------------------------------------------------------------------
201
- 2022-08-07 02:29:48,441 EPOCH 11 done: loss 0.1079 - lr 0.100000
202
- 2022-08-07 02:29:48,441 BAD EPOCHS (no improvement): 0
203
- 2022-08-07 02:29:48,441 ----------------------------------------------------------------------------------------------------
204
- 2022-08-07 02:29:59,308 epoch 12 - iter 337/3375 - loss 0.10369701 - samples/sec: 248.76 - lr: 0.100000
205
- 2022-08-07 02:30:10,007 epoch 12 - iter 674/3375 - loss 0.10434462 - samples/sec: 252.65 - lr: 0.100000
206
- 2022-08-07 02:30:21,057 epoch 12 - iter 1011/3375 - loss 0.10301943 - samples/sec: 244.62 - lr: 0.100000
207
- 2022-08-07 02:30:31,884 epoch 12 - iter 1348/3375 - loss 0.10382269 - samples/sec: 249.63 - lr: 0.100000
208
- 2022-08-07 02:30:42,874 epoch 12 - iter 1685/3375 - loss 0.10299628 - samples/sec: 245.95 - lr: 0.100000
209
- 2022-08-07 02:30:53,851 epoch 12 - iter 2022/3375 - loss 0.10337040 - samples/sec: 246.26 - lr: 0.100000
210
- 2022-08-07 02:31:05,148 epoch 12 - iter 2359/3375 - loss 0.10373705 - samples/sec: 239.29 - lr: 0.100000
211
- 2022-08-07 02:31:16,303 epoch 12 - iter 2696/3375 - loss 0.10369999 - samples/sec: 242.29 - lr: 0.100000
212
- 2022-08-07 02:31:27,420 epoch 12 - iter 3033/3375 - loss 0.10361166 - samples/sec: 243.12 - lr: 0.100000
213
- 2022-08-07 02:31:37,704 epoch 12 - iter 3370/3375 - loss 0.10497405 - samples/sec: 262.83 - lr: 0.100000
214
- 2022-08-07 02:31:37,879 ----------------------------------------------------------------------------------------------------
215
- 2022-08-07 02:31:37,879 EPOCH 12 done: loss 0.1049 - lr 0.100000
216
- 2022-08-07 02:31:37,879 BAD EPOCHS (no improvement): 0
217
- 2022-08-07 02:31:37,879 ----------------------------------------------------------------------------------------------------
218
- 2022-08-07 02:31:49,490 epoch 13 - iter 337/3375 - loss 0.09771881 - samples/sec: 232.85 - lr: 0.100000
219
- 2022-08-07 02:32:00,601 epoch 13 - iter 674/3375 - loss 0.10062735 - samples/sec: 243.28 - lr: 0.100000
220
- 2022-08-07 02:32:11,552 epoch 13 - iter 1011/3375 - loss 0.10145832 - samples/sec: 246.84 - lr: 0.100000
221
- 2022-08-07 02:32:22,854 epoch 13 - iter 1348/3375 - loss 0.10164191 - samples/sec: 239.18 - lr: 0.100000
222
- 2022-08-07 02:32:34,078 epoch 13 - iter 1685/3375 - loss 0.10188188 - samples/sec: 240.81 - lr: 0.100000
223
- 2022-08-07 02:32:44,783 epoch 13 - iter 2022/3375 - loss 0.10230566 - samples/sec: 252.52 - lr: 0.100000
224
- 2022-08-07 02:32:55,846 epoch 13 - iter 2359/3375 - loss 0.10241548 - samples/sec: 244.33 - lr: 0.100000
225
- 2022-08-07 02:33:06,906 epoch 13 - iter 2696/3375 - loss 0.10240741 - samples/sec: 244.36 - lr: 0.100000
226
- 2022-08-07 02:33:17,449 epoch 13 - iter 3033/3375 - loss 0.10221738 - samples/sec: 256.34 - lr: 0.100000
227
- 2022-08-07 02:33:28,620 epoch 13 - iter 3370/3375 - loss 0.10209074 - samples/sec: 242.02 - lr: 0.100000
228
- 2022-08-07 02:33:28,766 ----------------------------------------------------------------------------------------------------
229
- 2022-08-07 02:33:28,766 EPOCH 13 done: loss 0.1021 - lr 0.100000
230
- 2022-08-07 02:33:28,766 BAD EPOCHS (no improvement): 0
231
- 2022-08-07 02:33:28,767 ----------------------------------------------------------------------------------------------------
232
- 2022-08-07 02:33:39,870 epoch 14 - iter 337/3375 - loss 0.09796604 - samples/sec: 243.51 - lr: 0.100000
233
- 2022-08-07 02:33:50,605 epoch 14 - iter 674/3375 - loss 0.09578931 - samples/sec: 251.77 - lr: 0.100000
234
- 2022-08-07 02:34:01,922 epoch 14 - iter 1011/3375 - loss 0.10195230 - samples/sec: 238.83 - lr: 0.100000
235
- 2022-08-07 02:34:13,388 epoch 14 - iter 1348/3375 - loss 0.10021155 - samples/sec: 235.77 - lr: 0.100000
236
- 2022-08-07 02:34:24,657 epoch 14 - iter 1685/3375 - loss 0.10102509 - samples/sec: 239.86 - lr: 0.100000
237
- 2022-08-07 02:34:35,142 epoch 14 - iter 2022/3375 - loss 0.10069196 - samples/sec: 257.74 - lr: 0.100000
238
- 2022-08-07 02:34:51,364 epoch 14 - iter 2359/3375 - loss 0.10041362 - samples/sec: 166.48 - lr: 0.100000
239
- 2022-08-07 02:35:02,495 epoch 14 - iter 2696/3375 - loss 0.10042230 - samples/sec: 242.85 - lr: 0.100000
240
- 2022-08-07 02:35:13,930 epoch 14 - iter 3033/3375 - loss 0.10023253 - samples/sec: 236.38 - lr: 0.100000
241
- 2022-08-07 02:35:25,051 epoch 14 - iter 3370/3375 - loss 0.09980998 - samples/sec: 243.05 - lr: 0.100000
242
- 2022-08-07 02:35:25,195 ----------------------------------------------------------------------------------------------------
243
- 2022-08-07 02:35:25,196 EPOCH 14 done: loss 0.0998 - lr 0.100000
244
- 2022-08-07 02:35:25,196 BAD EPOCHS (no improvement): 0
245
- 2022-08-07 02:35:25,196 ----------------------------------------------------------------------------------------------------
246
- 2022-08-07 02:35:35,802 epoch 15 - iter 337/3375 - loss 0.09670690 - samples/sec: 254.85 - lr: 0.100000
247
- 2022-08-07 02:35:46,996 epoch 15 - iter 674/3375 - loss 0.09431814 - samples/sec: 241.48 - lr: 0.100000
248
- 2022-08-07 02:35:57,960 epoch 15 - iter 1011/3375 - loss 0.09377145 - samples/sec: 246.49 - lr: 0.100000
249
- 2022-08-07 02:36:08,715 epoch 15 - iter 1348/3375 - loss 0.09622842 - samples/sec: 251.39 - lr: 0.100000
250
- 2022-08-07 02:36:19,780 epoch 15 - iter 1685/3375 - loss 0.09767520 - samples/sec: 244.31 - lr: 0.100000
251
- 2022-08-07 02:36:30,749 epoch 15 - iter 2022/3375 - loss 0.09692809 - samples/sec: 246.42 - lr: 0.100000
252
- 2022-08-07 02:36:41,823 epoch 15 - iter 2359/3375 - loss 0.09696816 - samples/sec: 244.10 - lr: 0.100000
253
- 2022-08-07 02:36:52,983 epoch 15 - iter 2696/3375 - loss 0.09684092 - samples/sec: 242.20 - lr: 0.100000
254
- 2022-08-07 02:37:03,518 epoch 15 - iter 3033/3375 - loss 0.09721969 - samples/sec: 256.55 - lr: 0.100000
255
- 2022-08-07 02:37:14,478 epoch 15 - iter 3370/3375 - loss 0.09766576 - samples/sec: 246.62 - lr: 0.100000
256
- 2022-08-07 02:37:14,634 ----------------------------------------------------------------------------------------------------
257
- 2022-08-07 02:37:14,635 EPOCH 15 done: loss 0.0976 - lr 0.100000
258
- 2022-08-07 02:37:14,635 BAD EPOCHS (no improvement): 0
259
- 2022-08-07 02:37:14,635 ----------------------------------------------------------------------------------------------------
260
- 2022-08-07 02:37:25,907 epoch 16 - iter 337/3375 - loss 0.09192433 - samples/sec: 239.84 - lr: 0.100000
261
- 2022-08-07 02:37:37,145 epoch 16 - iter 674/3375 - loss 0.09118151 - samples/sec: 240.51 - lr: 0.100000
262
- 2022-08-07 02:37:47,961 epoch 16 - iter 1011/3375 - loss 0.09133619 - samples/sec: 249.92 - lr: 0.100000
263
- 2022-08-07 02:37:59,246 epoch 16 - iter 1348/3375 - loss 0.09326501 - samples/sec: 239.52 - lr: 0.100000
264
- 2022-08-07 02:38:10,240 epoch 16 - iter 1685/3375 - loss 0.09328072 - samples/sec: 245.86 - lr: 0.100000
265
- 2022-08-07 02:38:21,330 epoch 16 - iter 2022/3375 - loss 0.09375121 - samples/sec: 243.74 - lr: 0.100000
266
- 2022-08-07 02:38:32,486 epoch 16 - iter 2359/3375 - loss 0.09445046 - samples/sec: 242.34 - lr: 0.100000
267
- 2022-08-07 02:38:43,896 epoch 16 - iter 2696/3375 - loss 0.09434421 - samples/sec: 236.94 - lr: 0.100000
268
- 2022-08-07 02:38:54,862 epoch 16 - iter 3033/3375 - loss 0.09403046 - samples/sec: 246.48 - lr: 0.100000
269
- 2022-08-07 02:39:06,183 epoch 16 - iter 3370/3375 - loss 0.09453781 - samples/sec: 238.83 - lr: 0.100000
270
- 2022-08-07 02:39:06,368 ----------------------------------------------------------------------------------------------------
271
- 2022-08-07 02:39:06,369 EPOCH 16 done: loss 0.0954 - lr 0.100000
272
- 2022-08-07 02:39:06,369 BAD EPOCHS (no improvement): 0
273
- 2022-08-07 02:39:06,370 ----------------------------------------------------------------------------------------------------
274
- 2022-08-07 02:39:17,685 epoch 17 - iter 337/3375 - loss 0.09065832 - samples/sec: 238.97 - lr: 0.100000
275
- 2022-08-07 02:39:28,532 epoch 17 - iter 674/3375 - loss 0.09074058 - samples/sec: 249.20 - lr: 0.100000
276
- 2022-08-07 02:39:39,813 epoch 17 - iter 1011/3375 - loss 0.09076218 - samples/sec: 239.62 - lr: 0.100000
277
- 2022-08-07 02:39:51,097 epoch 17 - iter 1348/3375 - loss 0.09114038 - samples/sec: 239.58 - lr: 0.100000
278
- 2022-08-07 02:40:02,140 epoch 17 - iter 1685/3375 - loss 0.09364976 - samples/sec: 244.81 - lr: 0.100000
279
- 2022-08-07 02:40:13,561 epoch 17 - iter 2022/3375 - loss 0.09295760 - samples/sec: 236.71 - lr: 0.100000
280
- 2022-08-07 02:40:24,964 epoch 17 - iter 2359/3375 - loss 0.09259855 - samples/sec: 237.08 - lr: 0.100000
281
- 2022-08-07 02:40:36,010 epoch 17 - iter 2696/3375 - loss 0.09252924 - samples/sec: 244.74 - lr: 0.100000
282
- 2022-08-07 02:40:47,006 epoch 17 - iter 3033/3375 - loss 0.09270205 - samples/sec: 245.80 - lr: 0.100000
283
- 2022-08-07 02:40:58,218 epoch 17 - iter 3370/3375 - loss 0.09268398 - samples/sec: 241.11 - lr: 0.100000
284
- 2022-08-07 02:40:58,391 ----------------------------------------------------------------------------------------------------
285
- 2022-08-07 02:40:58,391 EPOCH 17 done: loss 0.0927 - lr 0.100000
286
- 2022-08-07 02:40:58,391 BAD EPOCHS (no improvement): 0
287
- 2022-08-07 02:40:58,392 ----------------------------------------------------------------------------------------------------
288
- 2022-08-07 02:41:09,599 epoch 18 - iter 337/3375 - loss 0.09055744 - samples/sec: 241.22 - lr: 0.100000
289
- 2022-08-07 02:41:20,285 epoch 18 - iter 674/3375 - loss 0.08777919 - samples/sec: 252.94 - lr: 0.100000
290
- 2022-08-07 02:41:31,419 epoch 18 - iter 1011/3375 - loss 0.08653121 - samples/sec: 242.79 - lr: 0.100000
291
- 2022-08-07 02:41:42,739 epoch 18 - iter 1348/3375 - loss 0.08740157 - samples/sec: 238.86 - lr: 0.100000
292
- 2022-08-07 02:41:53,953 epoch 18 - iter 1685/3375 - loss 0.08793633 - samples/sec: 241.06 - lr: 0.100000
293
- 2022-08-07 02:42:04,883 epoch 18 - iter 2022/3375 - loss 0.08754013 - samples/sec: 247.32 - lr: 0.100000
294
- 2022-08-07 02:42:16,283 epoch 18 - iter 2359/3375 - loss 0.08808403 - samples/sec: 237.14 - lr: 0.100000
295
- 2022-08-07 02:42:27,434 epoch 18 - iter 2696/3375 - loss 0.08880355 - samples/sec: 242.41 - lr: 0.100000
296
- 2022-08-07 02:42:38,785 epoch 18 - iter 3033/3375 - loss 0.08953809 - samples/sec: 238.13 - lr: 0.100000
297
- 2022-08-07 02:42:50,287 epoch 18 - iter 3370/3375 - loss 0.08999968 - samples/sec: 235.01 - lr: 0.100000
298
- 2022-08-07 02:42:50,435 ----------------------------------------------------------------------------------------------------
299
- 2022-08-07 02:42:50,435 EPOCH 18 done: loss 0.0900 - lr 0.100000
300
- 2022-08-07 02:42:50,435 BAD EPOCHS (no improvement): 0
301
- 2022-08-07 02:42:50,436 ----------------------------------------------------------------------------------------------------
302
- 2022-08-07 02:43:01,650 epoch 19 - iter 337/3375 - loss 0.08691442 - samples/sec: 241.09 - lr: 0.100000
303
- 2022-08-07 02:43:12,726 epoch 19 - iter 674/3375 - loss 0.08776779 - samples/sec: 244.04 - lr: 0.100000
304
- 2022-08-07 02:43:23,627 epoch 19 - iter 1011/3375 - loss 0.08694620 - samples/sec: 247.98 - lr: 0.100000
305
- 2022-08-07 02:43:34,831 epoch 19 - iter 1348/3375 - loss 0.08641312 - samples/sec: 241.26 - lr: 0.100000
306
- 2022-08-07 02:43:46,256 epoch 19 - iter 1685/3375 - loss 0.08834346 - samples/sec: 236.64 - lr: 0.100000
307
- 2022-08-07 02:43:57,309 epoch 19 - iter 2022/3375 - loss 0.08773463 - samples/sec: 244.61 - lr: 0.100000
308
- 2022-08-07 02:44:08,660 epoch 19 - iter 2359/3375 - loss 0.08783827 - samples/sec: 238.15 - lr: 0.100000
309
- 2022-08-07 02:44:19,311 epoch 19 - iter 2696/3375 - loss 0.08811852 - samples/sec: 253.71 - lr: 0.100000
310
- 2022-08-07 02:44:30,362 epoch 19 - iter 3033/3375 - loss 0.08824350 - samples/sec: 244.57 - lr: 0.100000
311
- 2022-08-07 02:44:41,601 epoch 19 - iter 3370/3375 - loss 0.08849871 - samples/sec: 240.51 - lr: 0.100000
312
- 2022-08-07 02:44:41,783 ----------------------------------------------------------------------------------------------------
313
- 2022-08-07 02:44:41,783 EPOCH 19 done: loss 0.0885 - lr 0.100000
314
- 2022-08-07 02:44:41,783 BAD EPOCHS (no improvement): 0
315
- 2022-08-07 02:44:41,783 ----------------------------------------------------------------------------------------------------
316
- 2022-08-07 02:44:52,525 epoch 20 - iter 337/3375 - loss 0.09322885 - samples/sec: 251.66 - lr: 0.100000
317
- 2022-08-07 02:45:03,893 epoch 20 - iter 674/3375 - loss 0.08928904 - samples/sec: 237.84 - lr: 0.100000
318
- 2022-08-07 02:45:15,191 epoch 20 - iter 1011/3375 - loss 0.08975760 - samples/sec: 239.28 - lr: 0.100000
319
- 2022-08-07 02:45:26,425 epoch 20 - iter 1348/3375 - loss 0.08790189 - samples/sec: 240.63 - lr: 0.100000
320
- 2022-08-07 02:45:36,609 epoch 20 - iter 1685/3375 - loss 0.08815741 - samples/sec: 265.45 - lr: 0.100000
321
- 2022-08-07 02:45:47,682 epoch 20 - iter 2022/3375 - loss 0.08809693 - samples/sec: 244.16 - lr: 0.100000
322
- 2022-08-07 02:45:58,875 epoch 20 - iter 2359/3375 - loss 0.08818872 - samples/sec: 241.50 - lr: 0.100000
323
- 2022-08-07 02:46:10,105 epoch 20 - iter 2696/3375 - loss 0.08873562 - samples/sec: 240.71 - lr: 0.100000
324
- 2022-08-07 02:46:20,950 epoch 20 - iter 3033/3375 - loss 0.08879496 - samples/sec: 249.22 - lr: 0.100000
325
- 2022-08-07 02:46:32,158 epoch 20 - iter 3370/3375 - loss 0.08832716 - samples/sec: 241.18 - lr: 0.100000
326
- 2022-08-07 02:46:32,335 ----------------------------------------------------------------------------------------------------
327
- 2022-08-07 02:46:32,335 EPOCH 20 done: loss 0.0883 - lr 0.100000
328
- 2022-08-07 02:46:32,335 BAD EPOCHS (no improvement): 0
329
- 2022-08-07 02:46:32,335 ----------------------------------------------------------------------------------------------------
330
- 2022-08-07 02:46:43,375 epoch 21 - iter 337/3375 - loss 0.09213887 - samples/sec: 244.86 - lr: 0.100000
331
- 2022-08-07 02:46:54,478 epoch 21 - iter 674/3375 - loss 0.08900913 - samples/sec: 243.46 - lr: 0.100000
332
- 2022-08-07 02:47:05,301 epoch 21 - iter 1011/3375 - loss 0.08754593 - samples/sec: 249.76 - lr: 0.100000
333
- 2022-08-07 02:47:15,433 epoch 21 - iter 1348/3375 - loss 0.08629554 - samples/sec: 266.73 - lr: 0.100000
334
- 2022-08-07 02:47:26,262 epoch 21 - iter 1685/3375 - loss 0.08524342 - samples/sec: 249.61 - lr: 0.100000
335
- 2022-08-07 02:47:37,339 epoch 21 - iter 2022/3375 - loss 0.08503406 - samples/sec: 244.03 - lr: 0.100000
336
- 2022-08-07 02:47:48,120 epoch 21 - iter 2359/3375 - loss 0.08520712 - samples/sec: 250.70 - lr: 0.100000
337
- 2022-08-07 02:47:59,174 epoch 21 - iter 2696/3375 - loss 0.08494666 - samples/sec: 244.53 - lr: 0.100000
338
- 2022-08-07 02:48:09,988 epoch 21 - iter 3033/3375 - loss 0.08480265 - samples/sec: 249.92 - lr: 0.100000
339
- 2022-08-07 02:48:21,077 epoch 21 - iter 3370/3375 - loss 0.08495562 - samples/sec: 243.77 - lr: 0.100000
340
- 2022-08-07 02:48:21,260 ----------------------------------------------------------------------------------------------------
341
- 2022-08-07 02:48:21,260 EPOCH 21 done: loss 0.0849 - lr 0.100000
342
- 2022-08-07 02:48:21,260 BAD EPOCHS (no improvement): 0
343
- 2022-08-07 02:48:21,261 ----------------------------------------------------------------------------------------------------
344
- 2022-08-07 02:48:32,111 epoch 22 - iter 337/3375 - loss 0.08295893 - samples/sec: 249.16 - lr: 0.100000
345
- 2022-08-07 02:48:43,332 epoch 22 - iter 674/3375 - loss 0.08481991 - samples/sec: 240.82 - lr: 0.100000
346
- 2022-08-07 02:48:54,762 epoch 22 - iter 1011/3375 - loss 0.08570495 - samples/sec: 236.51 - lr: 0.100000
347
- 2022-08-07 02:49:05,835 epoch 22 - iter 1348/3375 - loss 0.08393526 - samples/sec: 244.10 - lr: 0.100000
348
- 2022-08-07 02:49:15,803 epoch 22 - iter 1685/3375 - loss 0.08354373 - samples/sec: 271.08 - lr: 0.100000
349
- 2022-08-07 02:49:26,536 epoch 22 - iter 2022/3375 - loss 0.08345868 - samples/sec: 251.91 - lr: 0.100000
350
- 2022-08-07 02:49:38,171 epoch 22 - iter 2359/3375 - loss 0.08348163 - samples/sec: 232.34 - lr: 0.100000
351
- 2022-08-07 02:49:49,576 epoch 22 - iter 2696/3375 - loss 0.08383154 - samples/sec: 237.03 - lr: 0.100000
352
- 2022-08-07 02:50:00,569 epoch 22 - iter 3033/3375 - loss 0.08343660 - samples/sec: 245.95 - lr: 0.100000
353
- 2022-08-07 02:50:11,702 epoch 22 - iter 3370/3375 - loss 0.08352162 - samples/sec: 242.80 - lr: 0.100000
354
- 2022-08-07 02:50:11,847 ----------------------------------------------------------------------------------------------------
355
- 2022-08-07 02:50:11,847 EPOCH 22 done: loss 0.0835 - lr 0.100000
356
- 2022-08-07 02:50:11,847 BAD EPOCHS (no improvement): 0
357
- 2022-08-07 02:50:11,847 ----------------------------------------------------------------------------------------------------
358
- 2022-08-07 02:50:22,990 epoch 23 - iter 337/3375 - loss 0.07887801 - samples/sec: 242.63 - lr: 0.100000
359
- 2022-08-07 02:50:34,284 epoch 23 - iter 674/3375 - loss 0.08322045 - samples/sec: 239.33 - lr: 0.100000
360
- 2022-08-07 02:50:45,593 epoch 23 - iter 1011/3375 - loss 0.08177573 - samples/sec: 239.05 - lr: 0.100000
361
- 2022-08-07 02:50:56,652 epoch 23 - iter 1348/3375 - loss 0.08159359 - samples/sec: 244.44 - lr: 0.100000
362
- 2022-08-07 02:51:07,155 epoch 23 - iter 1685/3375 - loss 0.08185351 - samples/sec: 257.37 - lr: 0.100000
363
- 2022-08-07 02:51:18,142 epoch 23 - iter 2022/3375 - loss 0.08216048 - samples/sec: 246.03 - lr: 0.100000
364
- 2022-08-07 02:51:28,880 epoch 23 - iter 2359/3375 - loss 0.08232311 - samples/sec: 251.72 - lr: 0.100000
365
- 2022-08-07 02:51:40,057 epoch 23 - iter 2696/3375 - loss 0.08129492 - samples/sec: 241.84 - lr: 0.100000
366
- 2022-08-07 02:51:50,922 epoch 23 - iter 3033/3375 - loss 0.08169562 - samples/sec: 248.76 - lr: 0.100000
367
- 2022-08-07 02:52:02,160 epoch 23 - iter 3370/3375 - loss 0.08205725 - samples/sec: 240.50 - lr: 0.100000
368
- 2022-08-07 02:52:02,323 ----------------------------------------------------------------------------------------------------
369
- 2022-08-07 02:52:02,323 EPOCH 23 done: loss 0.0820 - lr 0.100000
370
- 2022-08-07 02:52:02,324 BAD EPOCHS (no improvement): 0
371
- 2022-08-07 02:52:02,324 ----------------------------------------------------------------------------------------------------
372
- 2022-08-07 02:52:13,284 epoch 24 - iter 337/3375 - loss 0.07347428 - samples/sec: 246.66 - lr: 0.100000
373
- 2022-08-07 02:52:24,344 epoch 24 - iter 674/3375 - loss 0.07465337 - samples/sec: 244.38 - lr: 0.100000
374
- 2022-08-07 02:52:35,586 epoch 24 - iter 1011/3375 - loss 0.07715712 - samples/sec: 240.49 - lr: 0.100000
375
- 2022-08-07 02:52:46,455 epoch 24 - iter 1348/3375 - loss 0.07792351 - samples/sec: 248.67 - lr: 0.100000
376
- 2022-08-07 02:52:57,159 epoch 24 - iter 1685/3375 - loss 0.07863379 - samples/sec: 252.50 - lr: 0.100000
377
- 2022-08-07 02:53:08,499 epoch 24 - iter 2022/3375 - loss 0.07926591 - samples/sec: 238.36 - lr: 0.100000
378
- 2022-08-07 02:53:18,679 epoch 24 - iter 2359/3375 - loss 0.07936523 - samples/sec: 265.47 - lr: 0.100000
379
- 2022-08-07 02:53:29,858 epoch 24 - iter 2696/3375 - loss 0.08120908 - samples/sec: 241.78 - lr: 0.100000
380
- 2022-08-07 02:53:41,047 epoch 24 - iter 3033/3375 - loss 0.08128250 - samples/sec: 241.61 - lr: 0.100000
381
- 2022-08-07 02:53:52,019 epoch 24 - iter 3370/3375 - loss 0.08094103 - samples/sec: 246.36 - lr: 0.100000
382
- 2022-08-07 02:53:52,158 ----------------------------------------------------------------------------------------------------
383
- 2022-08-07 02:53:52,159 EPOCH 24 done: loss 0.0810 - lr 0.100000
384
- 2022-08-07 02:53:52,159 BAD EPOCHS (no improvement): 0
385
- 2022-08-07 02:53:52,159 ----------------------------------------------------------------------------------------------------
386
- 2022-08-07 02:54:03,354 epoch 25 - iter 337/3375 - loss 0.08139893 - samples/sec: 241.47 - lr: 0.100000
387
- 2022-08-07 02:54:14,091 epoch 25 - iter 674/3375 - loss 0.07911841 - samples/sec: 251.69 - lr: 0.100000
388
- 2022-08-07 02:54:24,835 epoch 25 - iter 1011/3375 - loss 0.07860869 - samples/sec: 251.59 - lr: 0.100000
389
- 2022-08-07 02:54:35,909 epoch 25 - iter 1348/3375 - loss 0.07879774 - samples/sec: 244.08 - lr: 0.100000
390
- 2022-08-07 02:54:47,101 epoch 25 - iter 1685/3375 - loss 0.07789856 - samples/sec: 241.55 - lr: 0.100000
391
- 2022-08-07 02:54:58,083 epoch 25 - iter 2022/3375 - loss 0.07839394 - samples/sec: 246.13 - lr: 0.100000
392
- 2022-08-07 02:55:09,311 epoch 25 - iter 2359/3375 - loss 0.07843746 - samples/sec: 240.78 - lr: 0.100000
393
- 2022-08-07 02:55:20,397 epoch 25 - iter 2696/3375 - loss 0.07817246 - samples/sec: 243.82 - lr: 0.100000
394
- 2022-08-07 02:55:31,407 epoch 25 - iter 3033/3375 - loss 0.07825394 - samples/sec: 245.49 - lr: 0.100000
395
- 2022-08-07 02:55:42,519 epoch 25 - iter 3370/3375 - loss 0.07815000 - samples/sec: 243.29 - lr: 0.100000
396
- 2022-08-07 02:55:42,715 ----------------------------------------------------------------------------------------------------
397
- 2022-08-07 02:55:42,715 EPOCH 25 done: loss 0.0782 - lr 0.100000
398
- 2022-08-07 02:55:42,715 BAD EPOCHS (no improvement): 0
399
- 2022-08-07 02:55:43,803 ----------------------------------------------------------------------------------------------------
400
- 2022-08-07 02:55:43,803 Testing using last state of model ...
401
- 2022-08-07 02:56:08,461 Evaluating as a multi-label problem: False
402
- 2022-08-07 02:56:09,046 0.9703 0.9703 0.9703 0.9703
403
- 2022-08-07 02:56:09,046
404
  Results:
405
- - F-score (micro) 0.9703
406
- - F-score (macro) 0.8814
407
- - Accuracy 0.9703
408
 
409
  By class:
410
  precision recall f1-score support
411
 
412
- N_SING 0.9787 0.9602 0.9694 30553
413
- P 0.9567 0.9945 0.9752 9951
414
- DELM 0.9983 0.9996 0.9990 8122
415
- ADJ 0.9061 0.9466 0.9259 7466
416
- CON 0.9902 0.9809 0.9856 6823
417
- N_PL 0.9826 0.9731 0.9778 5163
418
- V_PA 0.9792 0.9819 0.9805 2873
419
- V_PRS 0.9937 0.9916 0.9926 2841
420
- NUM 0.9915 0.9982 0.9949 2232
421
- PRO 0.9694 0.9531 0.9611 2258
422
- DET 0.9583 0.9665 0.9624 1853
423
  CLITIC 1.0000 1.0000 1.0000 1259
424
- V_PP 0.9726 0.9827 0.9777 1158
425
- V_SUB 0.9871 0.9651 0.9760 1031
426
- ADV 0.8562 0.8659 0.8610 880
427
- ADV_TIME 0.9171 0.9734 0.9444 489
428
- V_AUX 0.9921 0.9947 0.9934 379
429
- ADJ_SUP 0.9925 0.9852 0.9888 270
430
- ADJ_CMPR 0.9444 0.9689 0.9565 193
431
- ADJ_INO 0.8531 0.7262 0.7846 168
432
- ADV_NEG 0.9270 0.8523 0.8881 149
433
- ADV_I 0.8976 0.8143 0.8539 140
434
- FW 0.8000 0.6179 0.6972 123
435
- ADV_COMP 0.8202 0.9605 0.8848 76
436
- ADV_LOC 0.9863 0.9863 0.9863 73
437
- V_IMP 0.7407 0.7143 0.7273 56
438
- PREV 0.8065 0.7812 0.7937 32
439
- INT 0.6111 0.4583 0.5238 24
440
- N_VOC 0.0000 0.0000 0.0000 0
441
 
442
- accuracy 0.9703 86635
443
- macro avg 0.8900 0.8756 0.8814 86635
444
- weighted avg 0.9707 0.9703 0.9703 86635
 
445
 
446
- 2022-08-07 02:56:09,046 ----------------------------------------------------------------------------------------------------
 
1
+ 2022-08-07 16:00:48,261 ----------------------------------------------------------------------------------------------------
2
+ 2022-08-07 16:00:48,267 Model: "SequenceTagger(
3
  (embeddings): StackedEmbeddings(
4
+ (list_embedding_0): WordEmbeddings('fa')
5
  (list_embedding_1): FlairEmbeddings(
6
  (lm): LanguageModel(
7
  (drop): Dropout(p=0.1, inplace=False)
 
21
  )
22
  (word_dropout): WordDropout(p=0.05)
23
  (locked_dropout): LockedDropout(p=0.5)
24
+ (embedding2nn): Linear(in_features=4396, out_features=4396, bias=True)
25
+ (rnn): LSTM(4396, 256, batch_first=True, bidirectional=True)
26
+ (linear): Linear(in_features=512, out_features=32, bias=True)
27
+ (beta): 1.0
28
+ (weights): None
29
+ (weight_tensor) None
30
  )"
31
+ 2022-08-07 16:00:48,272 ----------------------------------------------------------------------------------------------------
32
+ 2022-08-07 16:00:48,276 Corpus: "Corpus: 24000 train + 3000 dev + 3000 test sentences"
33
+ 2022-08-07 16:00:48,281 ----------------------------------------------------------------------------------------------------
34
+ 2022-08-07 16:00:48,282 Parameters:
35
+ 2022-08-07 16:00:48,285 - learning_rate: "0.1"
36
+ 2022-08-07 16:00:48,289 - mini_batch_size: "8"
37
+ 2022-08-07 16:00:48,293 - patience: "3"
38
+ 2022-08-07 16:00:48,295 - anneal_factor: "0.5"
39
+ 2022-08-07 16:00:48,296 - max_epochs: "5"
40
+ 2022-08-07 16:00:48,297 - shuffle: "True"
41
+ 2022-08-07 16:00:48,300 - train_with_dev: "False"
42
+ 2022-08-07 16:00:48,301 - batch_growth_annealing: "False"
43
+ 2022-08-07 16:00:48,303 ----------------------------------------------------------------------------------------------------
44
+ 2022-08-07 16:00:48,306 Model training base path: "/content/drive/MyDrive/project/data/pos/model2"
45
+ 2022-08-07 16:00:48,309 ----------------------------------------------------------------------------------------------------
46
+ 2022-08-07 16:00:48,316 Device: cuda:0
47
+ 2022-08-07 16:00:48,317 ----------------------------------------------------------------------------------------------------
48
+ 2022-08-07 16:00:48,318 Embeddings storage mode: none
49
+ 2022-08-07 16:00:48,337 ----------------------------------------------------------------------------------------------------
50
+ 2022-08-07 16:02:01,728 epoch 1 - iter 300/3000 - loss 0.75227154 - samples/sec: 32.71 - lr: 0.100000
51
+ 2022-08-07 16:03:44,240 epoch 1 - iter 600/3000 - loss 0.54616157 - samples/sec: 23.58 - lr: 0.100000
52
+ 2022-08-07 16:05:07,940 epoch 1 - iter 900/3000 - loss 0.46940731 - samples/sec: 28.91 - lr: 0.100000
53
+ 2022-08-07 16:06:48,542 epoch 1 - iter 1200/3000 - loss 0.41914715 - samples/sec: 24.03 - lr: 0.100000
54
+ 2022-08-07 16:08:31,313 epoch 1 - iter 1500/3000 - loss 0.38015901 - samples/sec: 23.52 - lr: 0.100000
55
+ 2022-08-07 16:10:05,508 epoch 1 - iter 1800/3000 - loss 0.35604709 - samples/sec: 25.67 - lr: 0.100000
56
+ 2022-08-07 16:11:31,898 epoch 1 - iter 2100/3000 - loss 0.33691470 - samples/sec: 28.01 - lr: 0.100000
57
+ 2022-08-07 16:13:00,338 epoch 1 - iter 2400/3000 - loss 0.32109903 - samples/sec: 27.35 - lr: 0.100000
58
+ 2022-08-07 16:14:32,548 epoch 1 - iter 2700/3000 - loss 0.31528796 - samples/sec: 26.23 - lr: 0.100000
59
+ 2022-08-07 16:16:09,123 epoch 1 - iter 3000/3000 - loss 0.30213703 - samples/sec: 25.03 - lr: 0.100000
60
+ 2022-08-07 16:16:09,831 ----------------------------------------------------------------------------------------------------
61
+ 2022-08-07 16:16:09,836 EPOCH 1 done: loss 0.3021 - lr 0.1000000
62
+ 2022-08-07 16:21:08,895 DEV : loss 0.1289350390434265 - f1-score (micro avg) 0.9601
63
+ 2022-08-07 16:21:08,937 BAD EPOCHS (no improvement): 0
64
+ 2022-08-07 16:21:10,769 saving best model
65
+ 2022-08-07 16:21:12,532 ----------------------------------------------------------------------------------------------------
66
+ 2022-08-07 16:22:54,846 epoch 2 - iter 300/3000 - loss 0.21020090 - samples/sec: 23.46 - lr: 0.100000
67
+ 2022-08-07 16:24:33,507 epoch 2 - iter 600/3000 - loss 0.20664426 - samples/sec: 24.50 - lr: 0.100000
68
+ 2022-08-07 16:26:17,056 epoch 2 - iter 900/3000 - loss 0.20271364 - samples/sec: 23.33 - lr: 0.100000
69
+ 2022-08-07 16:27:59,228 epoch 2 - iter 1200/3000 - loss 0.20055706 - samples/sec: 23.65 - lr: 0.100000
70
+ 2022-08-07 16:29:39,722 epoch 2 - iter 1500/3000 - loss 0.19912427 - samples/sec: 24.05 - lr: 0.100000
71
+ 2022-08-07 16:31:27,754 epoch 2 - iter 1800/3000 - loss 0.19760227 - samples/sec: 22.36 - lr: 0.100000
72
+ 2022-08-07 16:33:12,162 epoch 2 - iter 2100/3000 - loss 0.19795635 - samples/sec: 23.14 - lr: 0.100000
73
+ 2022-08-07 16:34:53,586 epoch 2 - iter 2400/3000 - loss 0.19672791 - samples/sec: 23.84 - lr: 0.100000
74
+ 2022-08-07 16:36:42,505 epoch 2 - iter 2700/3000 - loss 0.19643492 - samples/sec: 22.19 - lr: 0.100000
75
+ 2022-08-07 16:38:22,496 epoch 2 - iter 3000/3000 - loss 0.19530593 - samples/sec: 24.17 - lr: 0.100000
76
+ 2022-08-07 16:38:23,157 ----------------------------------------------------------------------------------------------------
77
+ 2022-08-07 16:38:23,162 EPOCH 2 done: loss 0.1953 - lr 0.1000000
78
+ 2022-08-07 16:43:34,928 DEV : loss 0.10149012506008148 - f1-score (micro avg) 0.9708
79
+ 2022-08-07 16:43:34,973 BAD EPOCHS (no improvement): 0
80
+ 2022-08-07 16:43:36,767 saving best model
81
+ 2022-08-07 16:43:38,486 ----------------------------------------------------------------------------------------------------
82
+ 2022-08-07 16:45:23,089 epoch 3 - iter 300/3000 - loss 0.17774341 - samples/sec: 22.95 - lr: 0.100000
83
+ 2022-08-07 16:47:08,214 epoch 3 - iter 600/3000 - loss 0.17596867 - samples/sec: 22.98 - lr: 0.100000
84
+ 2022-08-07 16:48:50,711 epoch 3 - iter 900/3000 - loss 0.17436321 - samples/sec: 23.58 - lr: 0.100000
85
+ 2022-08-07 16:50:35,039 epoch 3 - iter 1200/3000 - loss 0.17306311 - samples/sec: 23.16 - lr: 0.100000
86
+ 2022-08-07 16:52:20,808 epoch 3 - iter 1500/3000 - loss 0.17261464 - samples/sec: 22.84 - lr: 0.100000
87
+ 2022-08-07 16:54:02,750 epoch 3 - iter 1800/3000 - loss 0.17438407 - samples/sec: 23.71 - lr: 0.100000
88
+ 2022-08-07 16:55:42,154 epoch 3 - iter 2100/3000 - loss 0.17363800 - samples/sec: 24.31 - lr: 0.100000
89
+ 2022-08-07 16:57:21,978 epoch 3 - iter 2400/3000 - loss 0.17156485 - samples/sec: 24.21 - lr: 0.100000
90
+ 2022-08-07 16:59:05,968 epoch 3 - iter 2700/3000 - loss 0.17042576 - samples/sec: 23.23 - lr: 0.100000
91
+ 2022-08-07 17:00:46,166 epoch 3 - iter 3000/3000 - loss 0.16937353 - samples/sec: 24.12 - lr: 0.100000
92
+ 2022-08-07 17:00:46,857 ----------------------------------------------------------------------------------------------------
93
+ 2022-08-07 17:00:46,860 EPOCH 3 done: loss 0.1694 - lr 0.1000000
94
+ 2022-08-07 17:05:58,652 DEV : loss 0.09684865176677704 - f1-score (micro avg) 0.9731
95
+ 2022-08-07 17:05:58,703 BAD EPOCHS (no improvement): 0
96
+ 2022-08-07 17:06:00,477 saving best model
97
+ 2022-08-07 17:06:02,321 ----------------------------------------------------------------------------------------------------
98
+ 2022-08-07 17:07:44,646 epoch 4 - iter 300/3000 - loss 0.16212096 - samples/sec: 23.46 - lr: 0.100000
99
+ 2022-08-07 17:09:25,119 epoch 4 - iter 600/3000 - loss 0.15843816 - samples/sec: 24.05 - lr: 0.100000
100
+ 2022-08-07 17:11:07,080 epoch 4 - iter 900/3000 - loss 0.15900626 - samples/sec: 23.70 - lr: 0.100000
101
+ 2022-08-07 17:12:47,149 epoch 4 - iter 1200/3000 - loss 0.15764029 - samples/sec: 24.15 - lr: 0.100000
102
+ 2022-08-07 17:14:33,737 epoch 4 - iter 1500/3000 - loss 0.16000098 - samples/sec: 22.66 - lr: 0.100000
103
+ 2022-08-07 17:16:21,024 epoch 4 - iter 1800/3000 - loss 0.15931205 - samples/sec: 22.52 - lr: 0.100000
104
+ 2022-08-07 17:18:01,785 epoch 4 - iter 2100/3000 - loss 0.15961928 - samples/sec: 23.99 - lr: 0.100000
105
+ 2022-08-07 17:19:44,524 epoch 4 - iter 2400/3000 - loss 0.15845056 - samples/sec: 23.52 - lr: 0.100000
106
+ 2022-08-07 17:21:27,429 epoch 4 - iter 2700/3000 - loss 0.15771950 - samples/sec: 23.49 - lr: 0.100000
107
+ 2022-08-07 17:23:10,018 epoch 4 - iter 3000/3000 - loss 0.15777116 - samples/sec: 23.56 - lr: 0.100000
108
+ 2022-08-07 17:23:10,788 ----------------------------------------------------------------------------------------------------
109
+ 2022-08-07 17:23:10,794 EPOCH 4 done: loss 0.1578 - lr 0.1000000
110
+ 2022-08-07 17:28:23,406 DEV : loss 0.09011354297399521 - f1-score (micro avg) 0.9744
111
+ 2022-08-07 17:28:23,451 BAD EPOCHS (no improvement): 0
112
+ 2022-08-07 17:28:25,515 saving best model
113
+ 2022-08-07 17:28:27,346 ----------------------------------------------------------------------------------------------------
114
+ 2022-08-07 17:30:06,455 epoch 5 - iter 300/3000 - loss 0.14466099 - samples/sec: 24.22 - lr: 0.100000
115
+ 2022-08-07 17:31:44,351 epoch 5 - iter 600/3000 - loss 0.14401223 - samples/sec: 24.70 - lr: 0.100000
116
+ 2022-08-07 17:33:27,083 epoch 5 - iter 900/3000 - loss 0.14768050 - samples/sec: 23.53 - lr: 0.100000
117
+ 2022-08-07 17:35:07,577 epoch 5 - iter 1200/3000 - loss 0.14646819 - samples/sec: 24.05 - lr: 0.100000
118
+ 2022-08-07 17:36:47,275 epoch 5 - iter 1500/3000 - loss 0.14604558 - samples/sec: 24.25 - lr: 0.100000
119
+ 2022-08-07 17:38:24,129 epoch 5 - iter 1800/3000 - loss 0.14788483 - samples/sec: 24.96 - lr: 0.100000
120
+ 2022-08-07 17:40:04,518 epoch 5 - iter 2100/3000 - loss 0.14695063 - samples/sec: 24.08 - lr: 0.100000
121
+ 2022-08-07 17:41:51,964 epoch 5 - iter 2400/3000 - loss 0.14697433 - samples/sec: 22.49 - lr: 0.100000
122
+ 2022-08-07 17:43:32,173 epoch 5 - iter 2700/3000 - loss 0.14745015 - samples/sec: 24.12 - lr: 0.100000
123
+ 2022-08-07 17:45:17,557 epoch 5 - iter 3000/3000 - loss 0.14917362 - samples/sec: 22.93 - lr: 0.100000
124
+ 2022-08-07 17:45:18,255 ----------------------------------------------------------------------------------------------------
125
+ 2022-08-07 17:45:18,263 EPOCH 5 done: loss 0.1492 - lr 0.1000000
126
+ 2022-08-07 17:50:33,128 DEV : loss 0.08973350375890732 - f1-score (micro avg) 0.9746
127
+ 2022-08-07 17:50:33,176 BAD EPOCHS (no improvement): 0
128
+ 2022-08-07 17:50:34,869 saving best model
129
+ 2022-08-07 17:50:38,774 ----------------------------------------------------------------------------------------------------
130
+ 2022-08-07 17:50:38,811 loading file /content/drive/MyDrive/project/data/pos/model2/best-model.pt
131
+ 2022-08-07 17:55:05,420 0.9637 0.9637 0.9637 0.9637
132
+ 2022-08-07 17:55:05,422
133
  Results:
134
+ - F-score (micro) 0.9637
135
+ - F-score (macro) 0.8989
136
+ - Accuracy 0.9637
137
 
138
  By class:
139
  precision recall f1-score support
140
 
141
+ N_SING 0.9724 0.9521 0.9621 30553
142
+ P 0.9577 0.9919 0.9745 9951
143
+ DELM 0.9982 0.9996 0.9989 8122
144
+ ADJ 0.8768 0.9334 0.9042 7466
145
+ CON 0.9905 0.9786 0.9845 6823
146
+ N_PL 0.9719 0.9644 0.9681 5163
147
+ V_PA 0.9753 0.9756 0.9755 2873
148
+ V_PRS 0.9922 0.9852 0.9887 2841
149
+ NUM 0.9907 0.9982 0.9944 2232
150
+ PRO 0.9823 0.9349 0.9580 2258
151
+ DET 0.9429 0.9800 0.9611 1853
152
  CLITIC 1.0000 1.0000 1.0000 1259
153
+ V_PP 0.9398 0.9836 0.9612 1158
154
+ V_SUB 0.9746 0.9680 0.9713 1031
155
+ ADV 0.8180 0.8375 0.8276 880
156
+ ADV_TIME 0.9238 0.9673 0.9451 489
157
+ V_AUX 0.9947 0.9947 0.9947 379
158
+ ADJ_SUP 0.9925 0.9815 0.9870 270
159
+ ADJ_CMPR 0.9372 0.9275 0.9323 193
160
+ ADV_NEG 0.9071 0.8523 0.8789 149
161
+ ADV_I 0.8345 0.8286 0.8315 140
162
+ ADJ_INO 0.8846 0.5476 0.6765 168
163
+ FW 0.8442 0.5285 0.6500 123
164
+ ADV_COMP 0.8072 0.8816 0.8428 76
165
+ ADV_LOC 0.9342 0.9726 0.9530 73
166
+ V_IMP 0.7826 0.6429 0.7059 56
167
+ PREV 0.8276 0.7500 0.7869 32
168
+ INT 0.8333 0.4167 0.5556 24
 
169
 
170
+ micro avg 0.9637 0.9637 0.9637 86635
171
+ macro avg 0.9245 0.8848 0.8989 86635
172
+ weighted avg 0.9643 0.9637 0.9637 86635
173
+ samples avg 0.9637 0.9637 0.9637 86635
174
 
175
+ 2022-08-07 17:55:05,427 ----------------------------------------------------------------------------------------------------
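
The log above records the full configuration of this run: StackedEmbeddings (WordEmbeddings('fa') plus FlairEmbeddings), an LSTM with hidden size 256, and training for 5 epochs at learning rate 0.1 with mini-batch size 8, patience 3, anneal factor 0.5, train_with_dev False, and embeddings storage mode "none", saving the best model by dev score. A hedged sketch of how such a run is typically launched with Flair's ModelTrainer; the corpus construction and the FlairEmbeddings language-model names are not shown in the log, so they are left as placeholders:

    from flair.embeddings import WordEmbeddings, FlairEmbeddings, StackedEmbeddings
    from flair.models import SequenceTagger
    from flair.trainers import ModelTrainer

    # Placeholder: a Flair Corpus prepared elsewhere (24000 train + 3000 dev + 3000 test per the log).
    corpus = ...
    tag_type = "pos"  # assumption based on the "data/pos" base path

    embeddings = StackedEmbeddings([
        WordEmbeddings("fa"),
        FlairEmbeddings("<forward-lm>"),   # placeholder LM names; the log only shows FlairEmbeddings
        FlairEmbeddings("<backward-lm>"),
    ])

    tagger = SequenceTagger(
        hidden_size=256,  # matches LSTM(4396, 256, ...) in the printed model
        embeddings=embeddings,
        tag_dictionary=corpus.make_tag_dictionary(tag_type=tag_type),
        tag_type=tag_type,
    )

    trainer = ModelTrainer(tagger, corpus)
    trainer.train(
        "/content/drive/MyDrive/project/data/pos/model2",  # base path from the log
        learning_rate=0.1,
        mini_batch_size=8,
        max_epochs=5,
        patience=3,
        anneal_factor=0.5,
        train_with_dev=False,
        embeddings_storage_mode="none",
        shuffle=True,
    )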