joe611 committed
Commit 972d418 · verified · 1 Parent(s): 4891f1d

End of training

README.md CHANGED
@@ -16,7 +16,7 @@ should probably proofread and complete it, then remove this comment. -->
 
  This model is a fine-tuned version of [facebook/detr-resnet-50](https://huggingface.co/facebook/detr-resnet-50) on the None dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.1196
+ - Loss: 0.1949
 
  ## Model description
 
@@ -45,128 +45,128 @@ The following hyperparameters were used during training:
 
  ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss |
- |:-------------:|:-----:|:-----:|:---------------:|
- | 0.9349 | 1.0 | 116 | 0.7539 |
- | 0.5649 | 2.0 | 232 | 0.3776 |
- | 0.4438 | 3.0 | 348 | 0.6132 |
- | 0.4345 | 4.0 | 464 | 0.3365 |
- | 0.3976 | 5.0 | 580 | 0.3252 |
- | 0.403 | 6.0 | 696 | 0.3667 |
- | 0.3751 | 7.0 | 812 | 0.2778 |
- | 0.3511 | 8.0 | 928 | 0.2519 |
- | 0.3696 | 9.0 | 1044 | 0.2642 |
- | 0.3251 | 10.0 | 1160 | 0.3030 |
- | 0.3524 | 11.0 | 1276 | 0.2610 |
- | 0.3643 | 12.0 | 1392 | 0.2414 |
- | 0.3125 | 13.0 | 1508 | 0.2545 |
- | 0.325 | 14.0 | 1624 | 0.3899 |
- | 0.3124 | 15.0 | 1740 | 0.2996 |
- | 0.328 | 16.0 | 1856 | 0.2277 |
- | 0.2735 | 17.0 | 1972 | 0.2658 |
- | 0.3105 | 18.0 | 2088 | 0.2262 |
- | 0.2852 | 19.0 | 2204 | 0.2434 |
- | 0.3124 | 20.0 | 2320 | 0.3230 |
- | 0.2979 | 21.0 | 2436 | 0.2338 |
- | 0.2857 | 22.0 | 2552 | 0.2319 |
- | 0.3598 | 23.0 | 2668 | 0.2157 |
- | 0.352 | 24.0 | 2784 | 0.2456 |
- | 0.3087 | 25.0 | 2900 | 0.2397 |
- | 0.3206 | 26.0 | 3016 | 0.2461 |
- | 0.2837 | 27.0 | 3132 | 0.1970 |
- | 0.2981 | 28.0 | 3248 | 0.2741 |
- | 0.2845 | 29.0 | 3364 | 0.2312 |
- | 0.2768 | 30.0 | 3480 | 0.2197 |
- | 0.2806 | 31.0 | 3596 | 0.2411 |
- | 0.2635 | 32.0 | 3712 | 0.2112 |
- | 0.2908 | 33.0 | 3828 | 0.2023 |
- | 0.2704 | 34.0 | 3944 | 0.2480 |
- | 0.349 | 35.0 | 4060 | 0.2171 |
- | 0.2829 | 36.0 | 4176 | 0.2516 |
- | 0.3237 | 37.0 | 4292 | 0.2373 |
- | 0.2747 | 38.0 | 4408 | 0.2233 |
- | 0.3058 | 39.0 | 4524 | 0.2511 |
- | 0.4718 | 40.0 | 4640 | 0.3368 |
- | 0.2992 | 41.0 | 4756 | 0.2639 |
- | 0.285 | 42.0 | 4872 | 0.2716 |
- | 0.2702 | 43.0 | 4988 | 0.2264 |
- | 0.2905 | 44.0 | 5104 | 0.1958 |
- | 0.2815 | 45.0 | 5220 | 0.2076 |
- | 0.2806 | 46.0 | 5336 | 0.2315 |
- | 0.2503 | 47.0 | 5452 | 0.1862 |
- | 0.258 | 48.0 | 5568 | 0.2162 |
- | 0.2413 | 49.0 | 5684 | 0.1840 |
- | 0.2348 | 50.0 | 5800 | 0.1666 |
- | 0.2374 | 51.0 | 5916 | 0.2053 |
- | 0.24 | 52.0 | 6032 | 0.1717 |
- | 0.2306 | 53.0 | 6148 | 0.1881 |
- | 0.2398 | 54.0 | 6264 | 0.1845 |
- | 0.2363 | 55.0 | 6380 | 0.1764 |
- | 0.2249 | 56.0 | 6496 | 0.1942 |
- | 0.2154 | 57.0 | 6612 | 0.1945 |
- | 0.2348 | 58.0 | 6728 | 0.2108 |
- | 0.2349 | 59.0 | 6844 | 0.1930 |
- | 0.2294 | 60.0 | 6960 | 0.1902 |
- | 0.2155 | 61.0 | 7076 | 0.2001 |
- | 0.2197 | 62.0 | 7192 | 0.1737 |
- | 0.2271 | 63.0 | 7308 | 0.1624 |
- | 0.215 | 64.0 | 7424 | 0.1705 |
- | 0.2284 | 65.0 | 7540 | 0.1554 |
- | 0.2134 | 66.0 | 7656 | 0.1680 |
- | 0.2182 | 67.0 | 7772 | 0.1682 |
- | 0.2088 | 68.0 | 7888 | 0.1448 |
- | 0.2023 | 69.0 | 8004 | 0.1507 |
- | 0.2115 | 70.0 | 8120 | 0.1836 |
- | 0.202 | 71.0 | 8236 | 0.1779 |
- | 0.1923 | 72.0 | 8352 | 0.1594 |
- | 0.1993 | 73.0 | 8468 | 0.1700 |
- | 0.2003 | 74.0 | 8584 | 0.1587 |
- | 0.1975 | 75.0 | 8700 | 0.1667 |
- | 0.1996 | 76.0 | 8816 | 0.1637 |
- | 0.1933 | 77.0 | 8932 | 0.1344 |
- | 0.1884 | 78.0 | 9048 | 0.1497 |
- | 0.1912 | 79.0 | 9164 | 0.1571 |
- | 0.191 | 80.0 | 9280 | 0.1426 |
- | 0.1866 | 81.0 | 9396 | 0.1529 |
- | 0.1859 | 82.0 | 9512 | 0.1494 |
- | 0.183 | 83.0 | 9628 | 0.1508 |
- | 0.182 | 84.0 | 9744 | 0.1482 |
- | 0.171 | 85.0 | 9860 | 0.1662 |
- | 0.1773 | 86.0 | 9976 | 0.1561 |
- | 0.1742 | 87.0 | 10092 | 0.1514 |
- | 0.1778 | 88.0 | 10208 | 0.1371 |
- | 0.1721 | 89.0 | 10324 | 0.1426 |
- | 0.1725 | 90.0 | 10440 | 0.1554 |
- | 0.1665 | 91.0 | 10556 | 0.1494 |
- | 0.1739 | 92.0 | 10672 | 0.1423 |
- | 0.1688 | 93.0 | 10788 | 0.1467 |
- | 0.1706 | 94.0 | 10904 | 0.1267 |
- | 0.1715 | 95.0 | 11020 | 0.1383 |
- | 0.1684 | 96.0 | 11136 | 0.1357 |
- | 0.1699 | 97.0 | 11252 | 0.1464 |
- | 0.172 | 98.0 | 11368 | 0.1429 |
- | 0.1673 | 99.0 | 11484 | 0.1387 |
- | 0.166 | 100.0 | 11600 | 0.1369 |
- | 0.1655 | 101.0 | 11716 | 0.1272 |
- | 0.1654 | 102.0 | 11832 | 0.1237 |
- | 0.1625 | 103.0 | 11948 | 0.1321 |
- | 0.1622 | 104.0 | 12064 | 0.1275 |
- | 0.1606 | 105.0 | 12180 | 0.1250 |
- | 0.1603 | 106.0 | 12296 | 0.1293 |
- | 0.1622 | 107.0 | 12412 | 0.1275 |
- | 0.1607 | 108.0 | 12528 | 0.1238 |
- | 0.1593 | 109.0 | 12644 | 0.1264 |
- | 0.1568 | 110.0 | 12760 | 0.1251 |
- | 0.1589 | 111.0 | 12876 | 0.1253 |
- | 0.1576 | 112.0 | 12992 | 0.1246 |
- | 0.1539 | 113.0 | 13108 | 0.1223 |
- | 0.1583 | 114.0 | 13224 | 0.1228 |
- | 0.1601 | 115.0 | 13340 | 0.1217 |
- | 0.1628 | 116.0 | 13456 | 0.1222 |
- | 0.1542 | 117.0 | 13572 | 0.1214 |
- | 0.1559 | 118.0 | 13688 | 0.1213 |
- | 0.1598 | 119.0 | 13804 | 0.1217 |
- | 0.1554 | 120.0 | 13920 | 0.1196 |
+ | Training Loss | Epoch | Step | Validation Loss |
+ |:-------------:|:-----:|:----:|:---------------:|
+ | No log | 1.0 | 12 | 1.3837 |
+ | No log | 2.0 | 24 | 0.9192 |
+ | 1.2199 | 3.0 | 36 | 0.9478 |
+ | 1.2199 | 4.0 | 48 | 0.8188 |
+ | 0.9301 | 5.0 | 60 | 0.8648 |
+ | 0.9301 | 6.0 | 72 | 0.7913 |
+ | 0.9301 | 7.0 | 84 | 0.8269 |
+ | 0.834 | 8.0 | 96 | 0.7546 |
+ | 0.834 | 9.0 | 108 | 0.7128 |
+ | 0.7676 | 10.0 | 120 | 0.6706 |
+ | 0.7676 | 11.0 | 132 | 0.6042 |
+ | 0.7676 | 12.0 | 144 | 0.5586 |
+ | 0.6807 | 13.0 | 156 | 0.5129 |
+ | 0.6807 | 14.0 | 168 | 0.4815 |
+ | 0.5846 | 15.0 | 180 | 0.4724 |
+ | 0.5846 | 16.0 | 192 | 0.4970 |
+ | 0.5846 | 17.0 | 204 | 0.4900 |
+ | 0.5437 | 18.0 | 216 | 0.4985 |
+ | 0.5437 | 19.0 | 228 | 0.6295 |
+ | 0.5232 | 20.0 | 240 | 0.5023 |
+ | 0.5232 | 21.0 | 252 | 0.4312 |
+ | 0.5232 | 22.0 | 264 | 0.4583 |
+ | 0.5147 | 23.0 | 276 | 0.4499 |
+ | 0.5147 | 24.0 | 288 | 0.3438 |
+ | 0.4613 | 25.0 | 300 | 0.3953 |
+ | 0.4613 | 26.0 | 312 | 0.3916 |
+ | 0.4613 | 27.0 | 324 | 0.4285 |
+ | 0.4288 | 28.0 | 336 | 0.3532 |
+ | 0.4288 | 29.0 | 348 | 0.3513 |
+ | 0.4251 | 30.0 | 360 | 0.3761 |
+ | 0.4251 | 31.0 | 372 | 0.3183 |
+ | 0.4251 | 32.0 | 384 | 0.3419 |
+ | 0.3963 | 33.0 | 396 | 0.3186 |
+ | 0.3963 | 34.0 | 408 | 0.2799 |
+ | 0.3684 | 35.0 | 420 | 0.3688 |
+ | 0.3684 | 36.0 | 432 | 0.4035 |
+ | 0.3684 | 37.0 | 444 | 0.3491 |
+ | 0.4062 | 38.0 | 456 | 0.3147 |
+ | 0.4062 | 39.0 | 468 | 0.3333 |
+ | 0.3745 | 40.0 | 480 | 0.2822 |
+ | 0.3745 | 41.0 | 492 | 0.2734 |
+ | 0.3745 | 42.0 | 504 | 0.2816 |
+ | 0.3461 | 43.0 | 516 | 0.3289 |
+ | 0.3461 | 44.0 | 528 | 0.3538 |
+ | 0.3707 | 45.0 | 540 | 0.2969 |
+ | 0.3707 | 46.0 | 552 | 0.3335 |
+ | 0.3707 | 47.0 | 564 | 0.3201 |
+ | 0.3906 | 48.0 | 576 | 0.3262 |
+ | 0.3906 | 49.0 | 588 | 0.3213 |
+ | 0.3622 | 50.0 | 600 | 0.2825 |
+ | 0.3622 | 51.0 | 612 | 0.3111 |
+ | 0.3622 | 52.0 | 624 | 0.2814 |
+ | 0.336 | 53.0 | 636 | 0.3242 |
+ | 0.336 | 54.0 | 648 | 0.2615 |
+ | 0.3326 | 55.0 | 660 | 0.3107 |
+ | 0.3326 | 56.0 | 672 | 0.2904 |
+ | 0.3326 | 57.0 | 684 | 0.2967 |
+ | 0.3407 | 58.0 | 696 | 0.2818 |
+ | 0.3407 | 59.0 | 708 | 0.2759 |
+ | 0.3467 | 60.0 | 720 | 0.2862 |
+ | 0.3467 | 61.0 | 732 | 0.3529 |
+ | 0.3467 | 62.0 | 744 | 0.3559 |
+ | 0.354 | 63.0 | 756 | 0.2403 |
+ | 0.354 | 64.0 | 768 | 0.2815 |
+ | 0.3237 | 65.0 | 780 | 0.2819 |
+ | 0.3237 | 66.0 | 792 | 0.2476 |
+ | 0.3237 | 67.0 | 804 | 0.3193 |
+ | 0.3182 | 68.0 | 816 | 0.2444 |
+ | 0.3182 | 69.0 | 828 | 0.2510 |
+ | 0.3151 | 70.0 | 840 | 0.2951 |
+ | 0.3151 | 71.0 | 852 | 0.2389 |
+ | 0.3151 | 72.0 | 864 | 0.2657 |
+ | 0.3173 | 73.0 | 876 | 0.2783 |
+ | 0.3173 | 74.0 | 888 | 0.2791 |
+ | 0.3283 | 75.0 | 900 | 0.2445 |
+ | 0.3283 | 76.0 | 912 | 0.2507 |
+ | 0.3283 | 77.0 | 924 | 0.2778 |
+ | 0.3041 | 78.0 | 936 | 0.2471 |
+ | 0.3041 | 79.0 | 948 | 0.2219 |
+ | 0.2925 | 80.0 | 960 | 0.2767 |
+ | 0.2925 | 81.0 | 972 | 0.3046 |
+ | 0.2925 | 82.0 | 984 | 0.2837 |
+ | 0.3112 | 83.0 | 996 | 0.2710 |
+ | 0.3112 | 84.0 | 1008 | 0.2399 |
+ | 0.282 | 85.0 | 1020 | 0.2388 |
+ | 0.282 | 86.0 | 1032 | 0.2401 |
+ | 0.282 | 87.0 | 1044 | 0.2302 |
+ | 0.2806 | 88.0 | 1056 | 0.1975 |
+ | 0.2806 | 89.0 | 1068 | 0.2154 |
+ | 0.271 | 90.0 | 1080 | 0.1875 |
+ | 0.271 | 91.0 | 1092 | 0.2032 |
+ | 0.271 | 92.0 | 1104 | 0.2198 |
+ | 0.2695 | 93.0 | 1116 | 0.2018 |
+ | 0.2695 | 94.0 | 1128 | 0.2124 |
+ | 0.2593 | 95.0 | 1140 | 0.2150 |
+ | 0.2593 | 96.0 | 1152 | 0.1841 |
+ | 0.2593 | 97.0 | 1164 | 0.2062 |
+ | 0.2643 | 98.0 | 1176 | 0.1977 |
+ | 0.2643 | 99.0 | 1188 | 0.1847 |
+ | 0.2508 | 100.0 | 1200 | 0.1939 |
+ | 0.2508 | 101.0 | 1212 | 0.2070 |
+ | 0.2508 | 102.0 | 1224 | 0.1943 |
+ | 0.2547 | 103.0 | 1236 | 0.1911 |
+ | 0.2547 | 104.0 | 1248 | 0.1922 |
+ | 0.2512 | 105.0 | 1260 | 0.1988 |
+ | 0.2512 | 106.0 | 1272 | 0.1968 |
+ | 0.2512 | 107.0 | 1284 | 0.1984 |
+ | 0.2465 | 108.0 | 1296 | 0.2030 |
+ | 0.2465 | 109.0 | 1308 | 0.1995 |
+ | 0.2428 | 110.0 | 1320 | 0.1948 |
+ | 0.2428 | 111.0 | 1332 | 0.1969 |
+ | 0.2428 | 112.0 | 1344 | 0.1969 |
+ | 0.2432 | 113.0 | 1356 | 0.1943 |
+ | 0.2432 | 114.0 | 1368 | 0.1949 |
+ | 0.2428 | 115.0 | 1380 | 0.1930 |
+ | 0.2428 | 116.0 | 1392 | 0.1924 |
+ | 0.2428 | 117.0 | 1404 | 0.1948 |
+ | 0.2452 | 118.0 | 1416 | 0.1967 |
+ | 0.2452 | 119.0 | 1428 | 0.1943 |
+ | 0.245 | 120.0 | 1440 | 0.1949 |
 
 
  ### Framework versions
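If you want to try the checkpoint this commit produces, the sketch below shows one way to run object-detection inference with the `transformers` DETR classes. It is only an illustration, not part of the commit: the repository id and the image path are placeholders you would replace with the actual repo name or a local download.

```python
# Minimal inference sketch; the checkpoint id and image path are placeholders.
import torch
from PIL import Image
from transformers import AutoImageProcessor, DetrForObjectDetection

checkpoint = "joe611/your-finetuned-detr"  # placeholder: replace with the real repo id or local path
image = Image.open("example.jpg")          # placeholder test image

processor = AutoImageProcessor.from_pretrained(checkpoint)
model = DetrForObjectDetection.from_pretrained(checkpoint)

inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Turn raw logits and boxes into (score, label, box) detections in pixel coordinates.
target_sizes = torch.tensor([image.size[::-1]])  # (height, width)
results = processor.post_process_object_detection(
    outputs, threshold=0.5, target_sizes=target_sizes
)[0]
for score, label, box in zip(results["scores"], results["labels"], results["boxes"]):
    print(model.config.id2label[label.item()], round(score.item(), 3), box.tolist())
```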
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e84f6aa39bdcba8e0c24b9152570fe23ce6167492fd87e98449eb2affff65cf8
+ oid sha256:819508c2e19323c1730344102ec8cad7c8c5034566bf4ac93f7e3d0eddb9ed8a
  size 166494824
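The diff above only replaces the Git LFS pointer; the weights themselves live in LFS storage and are identified by the SHA-256 digest and size recorded here. A quick sanity check after downloading is to hash the local file and compare it against the pointer, as in the sketch below (the local file path is an assumption).

```python
# Verify a downloaded model.safetensors against the LFS pointer recorded in this commit.
import hashlib

EXPECTED_OID = "819508c2e19323c1730344102ec8cad7c8c5034566bf4ac93f7e3d0eddb9ed8a"
EXPECTED_SIZE = 166494824
path = "model.safetensors"  # assumed local path to the downloaded file

digest = hashlib.sha256()
size = 0
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)
        size += len(chunk)

print("size matches:", size == EXPECTED_SIZE)
print("sha256 matches:", digest.hexdigest() == EXPECTED_OID)
```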
runs/Sep24_17-50-01_484589858e28/events.out.tfevents.1727200203.484589858e28.189.1 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:881fb2a2db24186bc49792d513fa58684bedf64418ee8fca66be5b8c5295787e
- size 47884
+ oid sha256:e5023f7e6c93f29a4ad4db93ed012172b01539787d55d0100051a9edf1c84b6d
+ size 48509
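The new event file holds the raw TensorBoard scalars behind the loss table in the README. If you prefer the raw series to the rendered table, a sketch like the one below can read them back with TensorBoard's `EventAccumulator`; the run directory comes from the path in this commit, while the scalar tag names depend on how the trainer logged them, so the code lists them rather than assuming any.

```python
# Read scalar series (e.g. training/eval loss) back out of the TensorBoard event file.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

run_dir = "runs/Sep24_17-50-01_484589858e28"  # directory containing the events file from this commit
acc = EventAccumulator(run_dir)
acc.Reload()

scalar_tags = acc.Tags()["scalars"]  # discover which scalar tags were actually logged
print(scalar_tags)
for event in acc.Scalars(scalar_tags[0]):  # inspect one series, e.g. an eval-loss tag
    print(event.step, event.value)
```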