twdent committed on
Commit a193c32
Parent: a56ee6c

End of training

Files changed (4)
  1. README.md +51 -55
  2. config.json +5 -7
  3. pytorch_model.bin +2 -2
  4. training_args.bin +1 -1
README.md CHANGED
@@ -17,18 +17,14 @@ should probably proofread and complete it, then remove this comment. -->
 
  This model is a fine-tuned version of [nvidia/mit-b0](https://huggingface.co/nvidia/mit-b0) on the twdent/HikingHD dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.0985
- - Mean Iou: 0.9498
- - Mean Accuracy: 0.9737
- - Overall Accuracy: 0.9746
- - Accuracy Unlabeled: nan
- - Accuracy Traversable: 0.9658
- - Accuracy Non-traversable: 0.9815
- - Iou Unlabeled: nan
- - Iou Traversable: 0.9435
- - Iou Non-traversable: 0.9560
- - Local tests:
- - -Average inference time: 0.20031250105963813
+ - Loss: 0.1189
+ - Mean Iou: 0.9224
+ - Mean Accuracy: 0.9627
+ - Overall Accuracy: 0.9622
+ - Accuracy Traversable: 0.9645
+ - Accuracy Non-traversable: 0.9608
+ - Iou Traversable: 0.9032
+ - Iou Non-traversable: 0.9415
 
  ## Model description
 
@@ -57,50 +53,50 @@ The following hyperparameters were used during training:
 
  ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Accuracy Unlabeled | Accuracy Traversable | Accuracy Non-traversable | Iou Unlabeled | Iou Traversable | Iou Non-traversable |
- |:-------------:|:-----:|:----:|:---------------:|:--------:|:-------------:|:----------------:|:------------------:|:--------------------:|:------------------------:|:-------------:|:---------------:|:-------------------:|
- | 0.4142 | 1.33 | 20 | 0.6144 | 0.6186 | 0.9598 | 0.9615 | nan | 0.9452 | 0.9743 | 0.0 | 0.9184 | 0.9373 |
- | 0.497 | 2.67 | 40 | 0.2661 | 0.6281 | 0.9718 | 0.9705 | nan | 0.9825 | 0.9612 | 0.0 | 0.9361 | 0.9482 |
- | 0.2705 | 4.0 | 60 | 0.2025 | 0.6284 | 0.9698 | 0.9709 | nan | 0.9603 | 0.9792 | 0.0 | 0.9355 | 0.9497 |
- | 0.1683 | 5.33 | 80 | 0.1914 | 0.6274 | 0.9704 | 0.9701 | nan | 0.9730 | 0.9678 | 0.0 | 0.9345 | 0.9478 |
- | 0.1744 | 6.67 | 100 | 0.1896 | 0.6219 | 0.9630 | 0.9659 | nan | 0.9393 | 0.9867 | 0.0 | 0.9236 | 0.9420 |
- | 0.1522 | 8.0 | 120 | 0.1575 | 0.9399 | 0.9680 | 0.9695 | nan | 0.9555 | 0.9805 | nan | 0.9322 | 0.9475 |
- | 0.167 | 9.33 | 140 | 0.1299 | 0.9528 | 0.9765 | 0.9761 | nan | 0.9793 | 0.9737 | nan | 0.9474 | 0.9582 |
- | 0.1733 | 10.67 | 160 | 0.1385 | 0.6274 | 0.9696 | 0.9701 | nan | 0.9659 | 0.9734 | 0.0 | 0.9340 | 0.9481 |
- | 0.0839 | 12.0 | 180 | 0.1215 | 0.9487 | 0.9732 | 0.9741 | nan | 0.9662 | 0.9802 | nan | 0.9424 | 0.9550 |
- | 0.1237 | 13.33 | 200 | 0.1326 | 0.9377 | 0.9666 | 0.9684 | nan | 0.9516 | 0.9815 | nan | 0.9296 | 0.9458 |
- | 0.0814 | 14.67 | 220 | 0.1162 | 0.9459 | 0.9710 | 0.9727 | nan | 0.9568 | 0.9851 | nan | 0.9389 | 0.9529 |
- | 0.0823 | 16.0 | 240 | 0.1176 | 0.9438 | 0.9703 | 0.9716 | nan | 0.9600 | 0.9806 | nan | 0.9367 | 0.9509 |
- | 0.0758 | 17.33 | 260 | 0.1073 | 0.9481 | 0.9730 | 0.9738 | nan | 0.9671 | 0.9790 | nan | 0.9417 | 0.9544 |
- | 0.0722 | 18.67 | 280 | 0.0999 | 0.9521 | 0.9755 | 0.9758 | nan | 0.9730 | 0.9780 | nan | 0.9464 | 0.9578 |
- | 0.0745 | 20.0 | 300 | 0.1016 | 0.9493 | 0.9744 | 0.9744 | nan | 0.9745 | 0.9742 | nan | 0.9434 | 0.9552 |
- | 0.0747 | 21.33 | 320 | 0.0970 | 0.9523 | 0.9754 | 0.9759 | nan | 0.9713 | 0.9795 | nan | 0.9465 | 0.9580 |
- | 0.0698 | 22.67 | 340 | 0.1026 | 0.9497 | 0.9736 | 0.9746 | nan | 0.9652 | 0.9819 | nan | 0.9434 | 0.9559 |
- | 0.0705 | 24.0 | 360 | 0.1242 | 0.9346 | 0.9642 | 0.9668 | nan | 0.9423 | 0.9860 | nan | 0.9257 | 0.9435 |
- | 0.0859 | 25.33 | 380 | 0.1081 | 0.9423 | 0.9687 | 0.9708 | nan | 0.9512 | 0.9862 | nan | 0.9347 | 0.9499 |
- | 0.0633 | 26.67 | 400 | 0.1094 | 0.9423 | 0.9687 | 0.9709 | nan | 0.9513 | 0.9861 | nan | 0.9347 | 0.9500 |
- | 0.0753 | 28.0 | 420 | 0.1055 | 0.9433 | 0.9694 | 0.9714 | nan | 0.9535 | 0.9854 | nan | 0.9359 | 0.9508 |
- | 0.0435 | 29.33 | 440 | 0.1014 | 0.9475 | 0.9721 | 0.9735 | nan | 0.9608 | 0.9835 | nan | 0.9409 | 0.9542 |
- | 0.1026 | 30.67 | 460 | 0.1031 | 0.9459 | 0.9714 | 0.9727 | nan | 0.9610 | 0.9818 | nan | 0.9391 | 0.9527 |
- | 0.0545 | 32.0 | 480 | 0.1023 | 0.9465 | 0.9716 | 0.9730 | nan | 0.9605 | 0.9827 | nan | 0.9397 | 0.9533 |
- | 0.0515 | 33.33 | 500 | 0.0970 | 0.9491 | 0.9731 | 0.9743 | nan | 0.9628 | 0.9833 | nan | 0.9427 | 0.9555 |
- | 0.082 | 34.67 | 520 | 0.0887 | 0.9549 | 0.9767 | 0.9773 | nan | 0.9725 | 0.9810 | nan | 0.9494 | 0.9604 |
- | 0.0483 | 36.0 | 540 | 0.0939 | 0.9515 | 0.9746 | 0.9756 | nan | 0.9666 | 0.9825 | nan | 0.9455 | 0.9576 |
- | 0.0563 | 37.33 | 560 | 0.0904 | 0.9539 | 0.9761 | 0.9768 | nan | 0.9709 | 0.9814 | nan | 0.9483 | 0.9596 |
- | 0.0517 | 38.67 | 580 | 0.0907 | 0.9538 | 0.9760 | 0.9767 | nan | 0.9698 | 0.9822 | nan | 0.9481 | 0.9595 |
- | 0.0508 | 40.0 | 600 | 0.1080 | 0.9435 | 0.9697 | 0.9714 | nan | 0.9556 | 0.9838 | nan | 0.9361 | 0.9508 |
- | 0.0408 | 41.33 | 620 | 0.0969 | 0.9497 | 0.9735 | 0.9746 | nan | 0.9642 | 0.9827 | nan | 0.9434 | 0.9560 |
- | 0.036 | 42.67 | 640 | 0.0973 | 0.9498 | 0.9735 | 0.9747 | nan | 0.9642 | 0.9828 | nan | 0.9435 | 0.9561 |
- | 0.09 | 44.0 | 660 | 0.0932 | 0.9527 | 0.9753 | 0.9762 | nan | 0.9682 | 0.9824 | nan | 0.9469 | 0.9586 |
- | 0.0402 | 45.33 | 680 | 0.0963 | 0.9506 | 0.9739 | 0.9751 | nan | 0.9644 | 0.9834 | nan | 0.9444 | 0.9568 |
- | 0.0675 | 46.67 | 700 | 0.1034 | 0.9465 | 0.9715 | 0.9730 | nan | 0.9592 | 0.9838 | nan | 0.9397 | 0.9534 |
- | 0.0431 | 48.0 | 720 | 0.0996 | 0.9486 | 0.9728 | 0.9741 | nan | 0.9623 | 0.9833 | nan | 0.9421 | 0.9551 |
- | 0.0602 | 49.33 | 740 | 0.0985 | 0.9498 | 0.9737 | 0.9746 | nan | 0.9658 | 0.9815 | nan | 0.9435 | 0.9560 |
+ | Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Accuracy Traversable | Accuracy Non-traversable | Iou Traversable | Iou Non-traversable |
+ |:-------------:|:-----:|:----:|:---------------:|:--------:|:-------------:|:----------------:|:--------------------:|:------------------------:|:---------------:|:-------------------:|
+ | 0.1555 | 1.33 | 20 | 0.3462 | 0.8817 | 0.9484 | 0.9401 | 0.9792 | 0.9176 | 0.8568 | 0.9067 |
+ | 0.1168 | 2.67 | 40 | 0.1551 | 0.8998 | 0.9529 | 0.9503 | 0.9628 | 0.9431 | 0.8764 | 0.9233 |
+ | 0.1054 | 4.0 | 60 | 0.1566 | 0.8910 | 0.9527 | 0.9452 | 0.9807 | 0.9247 | 0.8675 | 0.9146 |
+ | 0.0775 | 5.33 | 80 | 0.1892 | 0.8645 | 0.9415 | 0.9304 | 0.9830 | 0.9000 | 0.8378 | 0.8912 |
+ | 0.1111 | 6.67 | 100 | 0.1369 | 0.9015 | 0.9515 | 0.9514 | 0.9520 | 0.9511 | 0.8776 | 0.9255 |
+ | 0.0737 | 8.0 | 120 | 0.1358 | 0.9005 | 0.9503 | 0.9510 | 0.9476 | 0.9529 | 0.8761 | 0.9249 |
+ | 0.0908 | 9.33 | 140 | 0.1186 | 0.9097 | 0.9565 | 0.9556 | 0.9599 | 0.9532 | 0.8878 | 0.9316 |
+ | 0.0654 | 10.67 | 160 | 0.1177 | 0.9182 | 0.9624 | 0.9599 | 0.9719 | 0.9529 | 0.8986 | 0.9377 |
+ | 0.0871 | 12.0 | 180 | 0.1220 | 0.9105 | 0.9546 | 0.9563 | 0.9482 | 0.9609 | 0.8880 | 0.9330 |
+ | 0.0493 | 13.33 | 200 | 0.1237 | 0.9126 | 0.9559 | 0.9573 | 0.9504 | 0.9613 | 0.8907 | 0.9346 |
+ | 0.0643 | 14.67 | 220 | 0.1232 | 0.9107 | 0.9503 | 0.9567 | 0.9265 | 0.9741 | 0.8868 | 0.9345 |
+ | 0.0491 | 16.0 | 240 | 0.1199 | 0.9140 | 0.9573 | 0.9580 | 0.9545 | 0.9600 | 0.8926 | 0.9355 |
+ | 0.0556 | 17.33 | 260 | 0.1114 | 0.9199 | 0.9613 | 0.9609 | 0.9628 | 0.9598 | 0.9001 | 0.9396 |
+ | 0.0484 | 18.67 | 280 | 0.1137 | 0.9189 | 0.9628 | 0.9603 | 0.9720 | 0.9535 | 0.8995 | 0.9383 |
+ | 0.0607 | 20.0 | 300 | 0.1230 | 0.9163 | 0.9625 | 0.9588 | 0.9762 | 0.9488 | 0.8966 | 0.9359 |
+ | 0.044 | 21.33 | 320 | 0.1349 | 0.9077 | 0.9567 | 0.9545 | 0.9648 | 0.9485 | 0.8858 | 0.9297 |
+ | 0.0426 | 22.67 | 340 | 0.1313 | 0.9070 | 0.9563 | 0.9541 | 0.9646 | 0.9481 | 0.8850 | 0.9291 |
+ | 0.0269 | 24.0 | 360 | 0.1143 | 0.9226 | 0.9668 | 0.9620 | 0.9850 | 0.9487 | 0.9046 | 0.9406 |
+ | 0.0593 | 25.33 | 380 | 0.1038 | 0.9235 | 0.9616 | 0.9629 | 0.9570 | 0.9662 | 0.9041 | 0.9428 |
+ | 0.0321 | 26.67 | 400 | 0.1136 | 0.9179 | 0.9598 | 0.9599 | 0.9595 | 0.9602 | 0.8976 | 0.9383 |
+ | 0.0752 | 28.0 | 420 | 0.1196 | 0.9194 | 0.9627 | 0.9606 | 0.9705 | 0.9548 | 0.9000 | 0.9388 |
+ | 0.0812 | 29.33 | 440 | 0.1253 | 0.9216 | 0.9665 | 0.9615 | 0.9854 | 0.9477 | 0.9035 | 0.9398 |
+ | 0.0329 | 30.67 | 460 | 0.1023 | 0.9294 | 0.9671 | 0.9657 | 0.9725 | 0.9618 | 0.9120 | 0.9467 |
+ | 0.035 | 32.0 | 480 | 0.0969 | 0.9282 | 0.9658 | 0.9651 | 0.9686 | 0.9631 | 0.9104 | 0.9460 |
+ | 0.0332 | 33.33 | 500 | 0.1086 | 0.9231 | 0.9620 | 0.9626 | 0.9598 | 0.9643 | 0.9038 | 0.9424 |
+ | 0.0343 | 34.67 | 520 | 0.0962 | 0.9312 | 0.9689 | 0.9666 | 0.9774 | 0.9603 | 0.9145 | 0.9480 |
+ | 0.0337 | 36.0 | 540 | 0.1072 | 0.9251 | 0.9649 | 0.9635 | 0.9703 | 0.9595 | 0.9067 | 0.9434 |
+ | 0.0367 | 37.33 | 560 | 0.1033 | 0.9302 | 0.9692 | 0.9660 | 0.9809 | 0.9574 | 0.9135 | 0.9470 |
+ | 0.0327 | 38.67 | 580 | 0.1014 | 0.9312 | 0.9681 | 0.9666 | 0.9734 | 0.9627 | 0.9143 | 0.9482 |
+ | 0.0293 | 40.0 | 600 | 0.1202 | 0.9207 | 0.9622 | 0.9613 | 0.9656 | 0.9588 | 0.9012 | 0.9401 |
+ | 0.0272 | 41.33 | 620 | 0.1113 | 0.9246 | 0.9634 | 0.9633 | 0.9637 | 0.9631 | 0.9058 | 0.9433 |
+ | 0.0304 | 42.67 | 640 | 0.1070 | 0.9253 | 0.9643 | 0.9637 | 0.9668 | 0.9619 | 0.9069 | 0.9438 |
+ | 0.037 | 44.0 | 660 | 0.1120 | 0.9228 | 0.9629 | 0.9624 | 0.9648 | 0.9610 | 0.9037 | 0.9419 |
+ | 0.0323 | 45.33 | 680 | 0.1132 | 0.9213 | 0.9615 | 0.9617 | 0.9609 | 0.9621 | 0.9016 | 0.9409 |
+ | 0.0281 | 46.67 | 700 | 0.1203 | 0.9199 | 0.9616 | 0.9609 | 0.9643 | 0.9589 | 0.9002 | 0.9396 |
+ | 0.0339 | 48.0 | 720 | 0.1124 | 0.9253 | 0.9646 | 0.9637 | 0.9683 | 0.9610 | 0.9070 | 0.9437 |
+ | 0.0289 | 49.33 | 740 | 0.1189 | 0.9224 | 0.9627 | 0.9622 | 0.9645 | 0.9608 | 0.9032 | 0.9415 |
 
 
  ### Framework versions
 
- - Transformers 4.35.0.dev0
- - Pytorch 2.1.0+cu118
- - Datasets 2.14.5
- - Tokenizers 0.14.0
+ - Transformers 4.34.1
+ - Pytorch 2.1.1+cu121
+ - Datasets 2.14.6
+ - Tokenizers 0.14.1
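
After this commit the card describes a two-class traversability segmenter (final Mean IoU 0.9224). As a minimal usage sketch, the checkpoint can be loaded with the standard SegFormer classes from `transformers`; the repo id and image path below are illustrative placeholders, not names confirmed by this commit:

```python
import torch
from PIL import Image
from transformers import SegformerImageProcessor, SegformerForSemanticSegmentation

MODEL_ID = "twdent/segformer-b0-finetuned-HikingHD"  # placeholder repo id

processor = SegformerImageProcessor.from_pretrained(MODEL_ID)
model = SegformerForSemanticSegmentation.from_pretrained(MODEL_ID).eval()

image = Image.open("trail.jpg").convert("RGB")  # placeholder input image
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, num_labels, H/4, W/4)

# Upsample the logits to the input resolution and take the per-pixel argmax.
upsampled = torch.nn.functional.interpolate(
    logits, size=image.size[::-1], mode="bilinear", align_corners=False
)
pred = upsampled.argmax(dim=1)[0]  # 0 = traversable, 1 = non-traversable
```

The class indices in the last comment follow the `id2label` mapping introduced in `config.json` below.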
config.json CHANGED
@@ -28,16 +28,14 @@
   256
  ],
  "id2label": {
- "0": "unlabeled",
- "1": "traversable",
- "2": "non-traversable"
+ "0": "traversable",
+ "1": "non-traversable"
  },
  "image_size": 224,
  "initializer_range": 0.02,
  "label2id": {
- "non-traversable": 2,
- "traversable": 1,
- "unlabeled": 0
+ "non-traversable": 1,
+ "traversable": 0
  },
  "layer_norm_eps": 1e-06,
  "mlp_ratios": [
@@ -76,5 +74,5 @@
   2
  ],
  "torch_dtype": "float32",
- "transformers_version": "4.35.0.dev0"
+ "transformers_version": "4.34.1"
  }
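
This change drops the `unlabeled` class, so the head now predicts two classes instead of three, which is also why the new results table above no longer has `Accuracy Unlabeled` / `Iou Unlabeled` columns. A sketch of how such a two-class mapping is typically supplied when instantiating the `nvidia/mit-b0` backbone for fine-tuning (illustrative only, not the author's actual training script):

```python
from transformers import SegformerForSemanticSegmentation

# Two-class mapping matching the updated config.json (no "unlabeled" class).
id2label = {0: "traversable", 1: "non-traversable"}
label2id = {name: idx for idx, name in id2label.items()}

model = SegformerForSemanticSegmentation.from_pretrained(
    "nvidia/mit-b0",
    num_labels=len(id2label),
    id2label=id2label,
    label2id=label2id,
)
# The decode head is newly initialised for the 2 classes; only the encoder
# weights come from the pretrained mit-b0 checkpoint.
```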
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0885794908689daac635ed92c460ffc86aa1baf305d9ebf918876669609294f1
- size 14933258
+ oid sha256:96eef92e5e0646b1b3fb67812a748f30468008149826b7ba6f2ca557285c549b
+ size 14932234
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:185fa32fa435206496c5590bd0758e800d3b1b533f349397fa4f2ebc71df1225
+ oid sha256:89e82587150c8430d94d783eb1902e7c37a30f4f15149cd0bf708acf74bed1fd
  size 4536
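
Both binary files are stored as Git LFS pointers, so the commit records only a SHA-256 digest (`oid`) and a byte size rather than the weights themselves. A small sketch, assuming the weights have been downloaded locally as `pytorch_model.bin` (the path is an assumption), of checking the file against the digest in the new pointer:

```python
import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    """Stream the file in chunks and return its hex SHA-256 digest."""
    digest = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Digest recorded by the new pytorch_model.bin LFS pointer above.
EXPECTED = "96eef92e5e0646b1b3fb67812a748f30468008149826b7ba6f2ca557285c549b"

local_path = Path("pytorch_model.bin")  # assumed local download location
assert sha256_of(local_path) == EXPECTED, "checksum mismatch -- re-download the file"
```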