Upload folder using huggingface_hub
- F1_curve.png +0 -0
- PR_curve.png +0 -0
- P_curve.png +0 -0
- R_curve.png +0 -0
- confusion_matrix.png +0 -0
- confusion_matrix_normalized.png +0 -0
- events.out.tfevents.1722612819.b59495779e3e.1787.0 +3 -0
- labels.jpg +0 -0
- labels_correlogram.jpg +0 -0
- model_artifacts.json +1 -0
- opt.yaml +106 -0
- results.csv +101 -0
- results.png +0 -0
- roboflow_deploy.zip +3 -0
- state_dict.pt +3 -0
- train_batch0.jpg +0 -0
- train_batch1.jpg +0 -0
- train_batch2.jpg +0 -0
- train_batch7560.jpg +0 -0
- train_batch7561.jpg +0 -0
- train_batch7562.jpg +0 -0
- val_batch0_labels.jpg +0 -0
- val_batch0_pred.jpg +0 -0
- val_batch1_labels.jpg +0 -0
- val_batch1_pred.jpg +0 -0
- val_batch2_labels.jpg +0 -0
- val_batch2_pred.jpg +0 -0
- weights/best.pt +3 -0
- weights/model_artifacts.json +1 -0
- weights/roboflow_deploy.zip +3 -0
- weights/state_dict.pt +3 -0
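The uploaded files are an Ultralytics training run (ultralytics 8.2.87, per model_artifacts.json), so weights/best.pt should presumably load directly with the ultralytics Python package. The snippet below is a minimal, untested sketch; it assumes this repository has been downloaded locally and uses a placeholder image path.

# Sketch: load the uploaded detector and run one prediction.
# Assumes `pip install ultralytics` and a local download of this repo so that
# weights/best.pt exists; "example.jpg" is a placeholder image path.
from ultralytics import YOLO

model = YOLO("weights/best.pt")                      # single-class "Eye" detector
results = model.predict("example.jpg", imgsz=640, conf=0.25)
for r in results:
    print(r.boxes.xyxy)                              # box corners in pixels
    print(r.boxes.conf)                              # confidence per box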
F1_curve.png
ADDED
PR_curve.png
ADDED
P_curve.png
ADDED
R_curve.png
ADDED
confusion_matrix.png
ADDED
confusion_matrix_normalized.png
ADDED
events.out.tfevents.1722612819.b59495779e3e.1787.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a2e5ed0e41da863aa7316a9f79b3b4dffe25a421e598b3bc637814f9637bf6ab
+size 544255
labels.jpg
ADDED
labels_correlogram.jpg
ADDED
model_artifacts.json
ADDED
@@ -0,0 +1 @@
+{"names": ["Eye"], "yaml": {"nc": 1, "backbone": [[-1, 1, "Conv", [64, 3, 2]], [-1, 1, "Conv", [128, 3, 2]], [-1, 1, "RepNCSPELAN4", [256, 128, 64, 1]], [-1, 1, "ADown", [256]], [-1, 1, "RepNCSPELAN4", [512, 256, 128, 1]], [-1, 1, "ADown", [512]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [-1, 1, "ADown", [512]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [-1, 1, "SPPELAN", [512, 256]]], "head": [[-1, 1, "nn.Upsample", ["None", 2, "nearest"]], [[-1, 6], 1, "Concat", [1]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [-1, 1, "nn.Upsample", ["None", 2, "nearest"]], [[-1, 4], 1, "Concat", [1]], [-1, 1, "RepNCSPELAN4", [256, 256, 128, 1]], [-1, 1, "ADown", [256]], [[-1, 12], 1, "Concat", [1]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [-1, 1, "ADown", [512]], [[-1, 9], 1, "Concat", [1]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [[15, 18, 21], 1, "Detect", ["nc"]]], "scale": "", "yaml_file": "yolov9c.yaml", "ch": 3}, "nc": 1, "args": {"model": "yolov9c.pt", "batch": 16, "imgsz": 640}, "ultralytics_version": "8.2.87", "model_type": "yolov8"}
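model_artifacts.json records the class map, the parsed YOLOv9-C architecture, and the core training arguments. As a small sketch (assuming a local copy of the file), the metadata can be read with the standard library:

import json

# Inspect the artifact metadata that ships alongside the weights.
with open("model_artifacts.json") as f:
    artifacts = json.load(f)

print(artifacts["names"])                # ["Eye"]
print(artifacts["nc"])                   # 1 class
print(artifacts["args"])                 # {"model": "yolov9c.pt", "batch": 16, "imgsz": 640}
print(artifacts["ultralytics_version"])  # "8.2.87"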
opt.yaml
ADDED
@@ -0,0 +1,106 @@
+task: detect
+mode: train
+model: yolov9c.pt
+data: /content/drive/MyDrive/TrainedModels/EyesCareDataSet/data.yaml
+epochs: 100
+time: null
+patience: 100
+batch: 16
+imgsz: 640
+save: true
+save_period: -1
+cache: false
+device: null
+workers: 8
+project: EyesCareV9C
+name: train5
+exist_ok: false
+pretrained: true
+optimizer: auto
+verbose: true
+seed: 0
+deterministic: true
+single_cls: false
+rect: false
+cos_lr: false
+close_mosaic: 10
+resume: false
+amp: true
+fraction: 1.0
+profile: false
+freeze: null
+multi_scale: false
+overlap_mask: true
+mask_ratio: 4
+dropout: 0.0
+val: true
+split: val
+save_json: false
+save_hybrid: false
+conf: 0.75
+iou: 0.7
+max_det: 300
+half: false
+dnn: false
+plots: true
+source: null
+vid_stride: 1
+stream_buffer: false
+visualize: false
+augment: false
+agnostic_nms: false
+classes: null
+retina_masks: false
+embed: null
+show: false
+save_frames: false
+save_txt: false
+save_conf: false
+save_crop: false
+show_labels: true
+show_conf: true
+show_boxes: true
+line_width: null
+format: torchscript
+keras: false
+optimize: false
+int8: false
+dynamic: false
+simplify: false
+opset: null
+workspace: 4
+nms: false
+lr0: 0.01
+lrf: 0.01
+momentum: 0.937
+weight_decay: 0.0005
+warmup_epochs: 3.0
+warmup_momentum: 0.8
+warmup_bias_lr: 0.1
+box: 7.5
+cls: 0.5
+dfl: 1.5
+pose: 12.0
+kobj: 1.0
+label_smoothing: 0.0
+nbs: 64
+hsv_h: 0.015
+hsv_s: 0.7
+hsv_v: 0.4
+degrees: 0.0
+translate: 0.1
+scale: 0.5
+shear: 0.0
+perspective: 0.0
+flipud: 0.0
+fliplr: 0.5
+bgr: 0.0
+mosaic: 1.0
+mixup: 0.0
+copy_paste: 0.0
+auto_augment: randaugment
+erasing: 0.4
+crop_fraction: 1.0
+cfg: null
+tracker: botsort.yaml
+save_dir: EyesCareV9C/train5
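opt.yaml is the full Ultralytics argument dump for this run (yolov9c.pt base weights, 100 epochs, imgsz 640, batch 16, output in EyesCareV9C/train5). A minimal sketch of relaunching an equivalent run is shown below; it assumes a local data.yaml for the EyesCare dataset (the Google Drive path above will not exist elsewhere) and passes only the arguments that identify this configuration, leaving the rest at their defaults.

from ultralytics import YOLO

# Sketch: retrain with the key settings recorded in opt.yaml.
# "data.yaml" stands in for a local copy of the EyesCareDataSet config.
model = YOLO("yolov9c.pt")
model.train(
    data="data.yaml",
    epochs=100,
    imgsz=640,
    batch=16,
    project="EyesCareV9C",
    name="train5",
    seed=0,
)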
results.csv
ADDED
@@ -0,0 +1,101 @@
+epoch, train/box_loss, train/cls_loss, train/dfl_loss, metrics/precision(B), metrics/recall(B), metrics/mAP50(B), metrics/mAP50-95(B), val/box_loss, val/cls_loss, val/dfl_loss, lr/pg0, lr/pg1, lr/pg2
+1, 1.3478, 1.3718, 1.8262, 0.01273, 0.01347, 0.00679, 0.00219, 5.4969, 13.097, 15.69, 0.00065873, 0.00065873, 0.00065873
+2, 1.3922, 1.0492, 1.836, 0.085, 0.11448, 0.04974, 0.01989, 3.939, 5.6935, 7.6263, 0.0013123, 0.0013123, 0.0013123
+3, 1.3559, 0.99059, 1.8046, 0.30682, 0.09091, 0.16854, 0.06396, 2.4277, 2.5658, 3.7646, 0.0019526, 0.0019526, 0.0019526
+4, 1.3284, 0.96054, 1.7755, 0.59434, 0.21212, 0.38725, 0.11933, 3.1975, 2.1, 4.9834, 0.0019406, 0.0019406, 0.0019406
+5, 1.265, 0.87471, 1.7208, 0.55556, 0.03367, 0.30101, 0.14644, 3.1218, 4.1386, 4.0469, 0.0019208, 0.0019208, 0.0019208
+6, 1.2434, 0.85411, 1.7102, 0.66537, 0.57576, 0.62507, 0.21101, 2.8025, 1.6863, 4.7715, 0.001901, 0.001901, 0.001901
+7, 1.2078, 0.79748, 1.6543, 0.99259, 0.90236, 0.95048, 0.63581, 1.4675, 0.67989, 2.6689, 0.0018812, 0.0018812, 0.0018812
+8, 1.1916, 0.7914, 1.6471, 0.98507, 0.88889, 0.94275, 0.64041, 1.4162, 0.71734, 2.6028, 0.0018614, 0.0018614, 0.0018614
+9, 1.1597, 0.75815, 1.6211, 0.97576, 0.54209, 0.76414, 0.45217, 1.9686, 0.94967, 3.2282, 0.0018416, 0.0018416, 0.0018416
+10, 1.1518, 0.7565, 1.6145, 1, 0.53535, 0.76765, 0.48182, 1.8186, 0.82421, 3.183, 0.0018218, 0.0018218, 0.0018218
+11, 1.1018, 0.70017, 1.5828, 0.9834, 0.79798, 0.89649, 0.58216, 1.5045, 0.7954, 2.6974, 0.001802, 0.001802, 0.001802
+12, 1.1146, 0.69738, 1.5907, 0.98157, 0.71717, 0.8549, 0.53135, 1.671, 0.7921, 2.9258, 0.0017822, 0.0017822, 0.0017822
+13, 1.1048, 0.69778, 1.5741, 0.92334, 0.89226, 0.93501, 0.57812, 1.5841, 0.78608, 2.761, 0.0017624, 0.0017624, 0.0017624
+14, 1.0977, 0.67001, 1.5709, 1, 0.67003, 0.83502, 0.61499, 1.3701, 0.61031, 2.4018, 0.0017426, 0.0017426, 0.0017426
+15, 1.092, 0.66586, 1.5591, 0.9668, 0.78451, 0.88791, 0.59178, 1.5076, 0.72814, 2.6223, 0.0017228, 0.0017228, 0.0017228
+16, 1.0669, 0.65043, 1.5411, 0.99602, 0.84175, 0.92021, 0.65345, 1.3433, 0.61592, 2.4587, 0.001703, 0.001703, 0.001703
+17, 1.0669, 0.64179, 1.5254, 0.99091, 0.73401, 0.86534, 0.62988, 1.4176, 0.62969, 2.4623, 0.0016832, 0.0016832, 0.0016832
+18, 1.0713, 0.65812, 1.5421, 0.98419, 0.83838, 0.91626, 0.62718, 1.4537, 0.60889, 2.5961, 0.0016634, 0.0016634, 0.0016634
+19, 1.0559, 0.63673, 1.5356, 0.99408, 0.56566, 0.7811, 0.54133, 1.5615, 0.65922, 2.7743, 0.0016436, 0.0016436, 0.0016436
+20, 1.0584, 0.6235, 1.5253, 0.97473, 0.90909, 0.95245, 0.68153, 1.2979, 0.58689, 2.3091, 0.0016238, 0.0016238, 0.0016238
+21, 1.0555, 0.61948, 1.5227, 0.98876, 0.88889, 0.94331, 0.69681, 1.2508, 0.53997, 2.2712, 0.001604, 0.001604, 0.001604
+22, 1.0246, 0.60863, 1.5156, 0.99609, 0.85859, 0.92888, 0.64509, 1.4572, 0.57637, 2.6002, 0.0015842, 0.0015842, 0.0015842
+23, 1.0194, 0.61261, 1.5037, 1, 0.76431, 0.8821, 0.5878, 1.6647, 0.59034, 2.8853, 0.0015644, 0.0015644, 0.0015644
+24, 1.034, 0.61791, 1.5137, 1, 0.74074, 0.87036, 0.63358, 1.3914, 0.5494, 2.3891, 0.0015446, 0.0015446, 0.0015446
+25, 1.017, 0.60895, 1.51, 1, 0.80471, 0.90229, 0.66935, 1.3035, 0.52604, 2.3286, 0.0015248, 0.0015248, 0.0015248
+26, 1.0049, 0.58844, 1.4853, 1, 0.86195, 0.93092, 0.68053, 1.2989, 0.50234, 2.324, 0.001505, 0.001505, 0.001505
+27, 1.0239, 0.58998, 1.4901, 0.99576, 0.79125, 0.89479, 0.64356, 1.4285, 0.54994, 2.5179, 0.0014852, 0.0014852, 0.0014852
+28, 1.0179, 0.58364, 1.4981, 1, 0.69024, 0.84511, 0.62432, 1.4143, 0.52522, 2.4172, 0.0014654, 0.0014654, 0.0014654
+29, 1.001, 0.5726, 1.4808, 1, 0.87205, 0.93596, 0.68831, 1.274, 0.49818, 2.3599, 0.0014456, 0.0014456, 0.0014456
+30, 0.99356, 0.56136, 1.4848, 1, 0.68013, 0.84007, 0.64316, 1.3109, 0.50287, 2.3083, 0.0014258, 0.0014258, 0.0014258
+31, 0.9781, 0.56076, 1.4732, 1, 0.86195, 0.93092, 0.69376, 1.2799, 0.51329, 2.3302, 0.001406, 0.001406, 0.001406
+32, 1.0079, 0.57048, 1.4823, 1, 0.81818, 0.90905, 0.61826, 1.5651, 0.55416, 2.8562, 0.0013862, 0.0013862, 0.0013862
+33, 0.99136, 0.55723, 1.4732, 1, 0.79125, 0.8956, 0.65213, 1.3681, 0.51104, 2.4481, 0.0013664, 0.0013664, 0.0013664
+34, 0.9636, 0.55331, 1.451, 0.9854, 0.90909, 0.95333, 0.68223, 1.3425, 0.53595, 2.4244, 0.0013466, 0.0013466, 0.0013466
+35, 0.979, 0.55672, 1.478, 1, 0.76768, 0.8838, 0.64553, 1.4203, 0.51859, 2.4285, 0.0013268, 0.0013268, 0.0013268
+36, 0.94252, 0.53481, 1.449, 1, 0.85185, 0.92587, 0.62724, 1.5067, 0.48609, 2.5732, 0.001307, 0.001307, 0.001307
+37, 0.97911, 0.5495, 1.4753, 0.9913, 0.76768, 0.88232, 0.59737, 1.5876, 0.5396, 2.8028, 0.0012872, 0.0012872, 0.0012872
+38, 0.9748, 0.54739, 1.4574, 0.98828, 0.85185, 0.92345, 0.65264, 1.3816, 0.48299, 2.3807, 0.0012674, 0.0012674, 0.0012674
+39, 0.96181, 0.53764, 1.4428, 0.99569, 0.77778, 0.88823, 0.62514, 1.4848, 0.50658, 2.5851, 0.0012476, 0.0012476, 0.0012476
+40, 0.97498, 0.53196, 1.4617, 1, 0.67677, 0.83835, 0.64278, 1.2849, 0.46876, 2.2352, 0.0012278, 0.0012278, 0.0012278
+41, 0.95994, 0.53584, 1.4515, 1, 0.80135, 0.90064, 0.64919, 1.4122, 0.52316, 2.4286, 0.001208, 0.001208, 0.001208
+42, 0.96609, 0.53921, 1.4602, 0.99262, 0.90572, 0.95238, 0.68656, 1.3259, 0.48855, 2.4381, 0.0011882, 0.0011882, 0.0011882
+43, 0.95538, 0.5334, 1.4405, 0.98842, 0.86195, 0.9294, 0.64768, 1.413, 0.49706, 2.5352, 0.0011684, 0.0011684, 0.0011684
+44, 0.96929, 0.53549, 1.4538, 1, 0.74411, 0.87201, 0.64593, 1.3403, 0.46793, 2.3406, 0.0011486, 0.0011486, 0.0011486
+45, 0.95766, 0.5201, 1.44, 0.99611, 0.86195, 0.93066, 0.6633, 1.3679, 0.49041, 2.4672, 0.0011288, 0.0011288, 0.0011288
+46, 0.94408, 0.51216, 1.4403, 0.9959, 0.81818, 0.90866, 0.67668, 1.3368, 0.49565, 2.3125, 0.001109, 0.001109, 0.001109
+47, 0.94649, 0.50635, 1.4367, 1, 0.7037, 0.85181, 0.62768, 1.383, 0.47548, 2.4108, 0.0010892, 0.0010892, 0.0010892
+48, 0.94115, 0.51912, 1.4439, 1, 0.83502, 0.91743, 0.63572, 1.4657, 0.50808, 2.5283, 0.0010694, 0.0010694, 0.0010694
+49, 0.94782, 0.52734, 1.4436, 1, 0.87542, 0.93761, 0.66679, 1.4033, 0.4886, 2.3673, 0.0010496, 0.0010496, 0.0010496
+50, 0.942, 0.51766, 1.4456, 1, 0.72391, 0.86191, 0.63399, 1.4498, 0.49298, 2.4747, 0.0010298, 0.0010298, 0.0010298
+51, 0.94012, 0.52371, 1.4373, 0.98897, 0.90572, 0.9516, 0.65287, 1.4631, 0.52538, 2.5313, 0.00101, 0.00101, 0.00101
+52, 0.9233, 0.50079, 1.4224, 0.99592, 0.82155, 0.91035, 0.64951, 1.4476, 0.49726, 2.5551, 0.0009902, 0.0009902, 0.0009902
+53, 0.93849, 0.50722, 1.4268, 0.98889, 0.89899, 0.94812, 0.65051, 1.4595, 0.50114, 2.5102, 0.0009704, 0.0009704, 0.0009704
+54, 0.93696, 0.50834, 1.4273, 1, 0.82155, 0.91074, 0.64756, 1.4396, 0.49129, 2.4665, 0.0009506, 0.0009506, 0.0009506
+55, 0.93805, 0.50211, 1.4299, 0.99623, 0.88889, 0.94418, 0.66916, 1.3704, 0.48048, 2.403, 0.0009308, 0.0009308, 0.0009308
+56, 0.9266, 0.49136, 1.4187, 1, 0.83502, 0.91743, 0.66593, 1.4284, 0.517, 2.5876, 0.000911, 0.000911, 0.000911
+57, 0.91673, 0.5024, 1.4277, 1, 0.84512, 0.92248, 0.62864, 1.5073, 0.4868, 2.7354, 0.0008912, 0.0008912, 0.0008912
+58, 0.93566, 0.4929, 1.4254, 0.99606, 0.85185, 0.92544, 0.66626, 1.4069, 0.46941, 2.4303, 0.0008714, 0.0008714, 0.0008714
+59, 0.91572, 0.48294, 1.4093, 1, 0.85522, 0.92752, 0.66291, 1.4211, 0.50224, 2.5361, 0.0008516, 0.0008516, 0.0008516
+60, 0.91344, 0.49736, 1.4192, 1, 0.78114, 0.89055, 0.63705, 1.4465, 0.49754, 2.4842, 0.0008318, 0.0008318, 0.0008318
+61, 0.92522, 0.49194, 1.4179, 0.98456, 0.85859, 0.92706, 0.67955, 1.3519, 0.4904, 2.3712, 0.000812, 0.000812, 0.000812
+62, 0.90989, 0.48228, 1.4167, 0.99595, 0.82828, 0.91366, 0.64975, 1.4192, 0.46464, 2.519, 0.0007922, 0.0007922, 0.0007922
+63, 0.90048, 0.48363, 1.401, 0.99617, 0.87542, 0.93729, 0.67238, 1.3481, 0.4694, 2.4206, 0.0007724, 0.0007724, 0.0007724
+64, 0.90954, 0.49338, 1.417, 0.99206, 0.84175, 0.92015, 0.6897, 1.2754, 0.43457, 2.3034, 0.0007526, 0.0007526, 0.0007526
+65, 0.89196, 0.47369, 1.395, 0.9878, 0.81818, 0.90716, 0.65679, 1.3875, 0.46299, 2.4236, 0.0007328, 0.0007328, 0.0007328
+66, 0.89988, 0.47276, 1.4006, 0.99234, 0.87205, 0.93534, 0.68949, 1.261, 0.44364, 2.2652, 0.000713, 0.000713, 0.000713
+67, 0.90453, 0.48296, 1.4171, 0.99615, 0.87205, 0.93565, 0.61787, 1.5849, 0.52934, 2.7921, 0.0006932, 0.0006932, 0.0006932
+68, 0.89759, 0.47485, 1.4045, 0.99603, 0.84512, 0.92205, 0.63909, 1.4769, 0.48695, 2.6058, 0.0006734, 0.0006734, 0.0006734
+69, 0.88056, 0.46418, 1.3954, 0.98188, 0.91246, 0.95472, 0.67872, 1.3664, 0.46079, 2.4766, 0.0006536, 0.0006536, 0.0006536
+70, 0.90106, 0.4595, 1.4044, 0.99593, 0.82492, 0.91175, 0.63354, 1.4977, 0.46126, 2.5482, 0.0006338, 0.0006338, 0.0006338
+71, 0.90963, 0.48202, 1.4026, 0.99615, 0.87205, 0.9355, 0.66681, 1.3992, 0.45008, 2.4364, 0.000614, 0.000614, 0.000614
+72, 0.88411, 0.47371, 1.3903, 0.98901, 0.90909, 0.9537, 0.68587, 1.3554, 0.46145, 2.4393, 0.0005942, 0.0005942, 0.0005942
+73, 0.8979, 0.46949, 1.3989, 0.99167, 0.80135, 0.89943, 0.63464, 1.5445, 0.47869, 2.6383, 0.0005744, 0.0005744, 0.0005744
+74, 0.90887, 0.46921, 1.4102, 1, 0.83838, 0.91915, 0.66879, 1.3589, 0.43575, 2.4006, 0.0005546, 0.0005546, 0.0005546
+75, 0.88551, 0.45933, 1.3896, 0.99225, 0.86195, 0.93021, 0.66972, 1.3988, 0.46016, 2.4616, 0.0005348, 0.0005348, 0.0005348
+76, 0.89449, 0.46384, 1.4, 0.99603, 0.84512, 0.92173, 0.63648, 1.5051, 0.4818, 2.6119, 0.000515, 0.000515, 0.000515
+77, 0.87838, 0.45375, 1.3781, 0.98864, 0.87879, 0.93819, 0.66895, 1.4037, 0.4544, 2.47, 0.0004952, 0.0004952, 0.0004952
+78, 0.88528, 0.45119, 1.387, 0.98551, 0.91582, 0.95659, 0.68932, 1.3391, 0.43608, 2.3948, 0.0004754, 0.0004754, 0.0004754
+79, 0.87915, 0.45753, 1.3948, 1, 0.85522, 0.92752, 0.67852, 1.353, 0.44312, 2.4037, 0.0004556, 0.0004556, 0.0004556
+80, 0.86558, 0.45818, 1.383, 0.97232, 0.94613, 0.97032, 0.65394, 1.476, 0.46769, 2.6132, 0.0004358, 0.0004358, 0.0004358
+81, 0.87766, 0.45616, 1.3845, 0.98535, 0.90572, 0.95085, 0.66991, 1.4086, 0.43914, 2.4867, 0.000416, 0.000416, 0.000416
+82, 0.88427, 0.4659, 1.3854, 0.98535, 0.90572, 0.95128, 0.67435, 1.3983, 0.44466, 2.4907, 0.0003962, 0.0003962, 0.0003962
+83, 0.87161, 0.44787, 1.3778, 0.99209, 0.84512, 0.92175, 0.65718, 1.4406, 0.44193, 2.5238, 0.0003764, 0.0003764, 0.0003764
+84, 0.87491, 0.43912, 1.3804, 0.9878, 0.81818, 0.90745, 0.6344, 1.505, 0.47644, 2.6386, 0.0003566, 0.0003566, 0.0003566
+85, 0.85046, 0.43637, 1.3758, 0.99222, 0.85859, 0.92791, 0.65096, 1.4376, 0.45598, 2.5513, 0.0003368, 0.0003368, 0.0003368
+86, 0.85985, 0.43885, 1.3698, 0.98885, 0.89562, 0.94673, 0.67424, 1.4043, 0.46118, 2.4907, 0.000317, 0.000317, 0.000317
+87, 0.85343, 0.44242, 1.3639, 0.99262, 0.90572, 0.95212, 0.69238, 1.3146, 0.43872, 2.3239, 0.0002972, 0.0002972, 0.0002972
+88, 0.8668, 0.44139, 1.3755, 0.98134, 0.88552, 0.94074, 0.6778, 1.3622, 0.45564, 2.4174, 0.0002774, 0.0002774, 0.0002774
+89, 0.8663, 0.45031, 1.3774, 0.99231, 0.86869, 0.93361, 0.68103, 1.3631, 0.43537, 2.4287, 0.0002576, 0.0002576, 0.0002576
+90, 0.84358, 0.43307, 1.3598, 0.98824, 0.84848, 0.92315, 0.67424, 1.3391, 0.41945, 2.3748, 0.0002378, 0.0002378, 0.0002378
+91, 0.73533, 0.31995, 1.5025, 0.98819, 0.84512, 0.92096, 0.6719, 1.3428, 0.44235, 2.3746, 0.000218, 0.000218, 0.000218
+92, 0.73367, 0.28682, 1.4978, 0.99602, 0.84175, 0.92049, 0.66825, 1.3798, 0.43313, 2.4665, 0.0001982, 0.0001982, 0.0001982
+93, 0.7154, 0.2854, 1.4861, 1, 0.83838, 0.91915, 0.67705, 1.3601, 0.43243, 2.4514, 0.0001784, 0.0001784, 0.0001784
+94, 0.72143, 0.28107, 1.5006, 0.98851, 0.86869, 0.93335, 0.67054, 1.3819, 0.43954, 2.4453, 0.0001586, 0.0001586, 0.0001586
+95, 0.70859, 0.27644, 1.4875, 0.99609, 0.85859, 0.92896, 0.66574, 1.4123, 0.44681, 2.5099, 0.0001388, 0.0001388, 0.0001388
+96, 0.70266, 0.27193, 1.4672, 0.98188, 0.91246, 0.95493, 0.66405, 1.4255, 0.44663, 2.5752, 0.000119, 0.000119, 0.000119
+97, 0.69778, 0.27072, 1.4588, 0.97778, 0.88889, 0.94262, 0.65536, 1.4441, 0.45636, 2.602, 9.92e-05, 9.92e-05, 9.92e-05
+98, 0.69723, 0.26729, 1.4658, 0.97826, 0.90909, 0.95206, 0.67036, 1.4061, 0.44486, 2.5214, 7.94e-05, 7.94e-05, 7.94e-05
+99, 0.6767, 0.26387, 1.4429, 0.97834, 0.91246, 0.95368, 0.68062, 1.3823, 0.4365, 2.5072, 5.96e-05, 5.96e-05, 5.96e-05
+100, 0.69184, 0.26839, 1.4539, 0.98529, 0.90236, 0.94934, 0.66894, 1.4085, 0.43594, 2.5444, 3.98e-05, 3.98e-05, 3.98e-05
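results.csv logs per-epoch training losses and validation metrics for the 100-epoch run, with mAP50-95(B) peaking just under 0.70. A short sketch for plotting the mAP curves, assuming pandas and matplotlib are installed and the file is available locally (Ultralytics pads the column names with spaces):

import pandas as pd
import matplotlib.pyplot as plt

# Plot the detection metrics logged in results.csv.
df = pd.read_csv("results.csv")
df.columns = df.columns.str.strip()      # headers carry leading spaces

fig, ax = plt.subplots()
ax.plot(df["epoch"], df["metrics/mAP50(B)"], label="mAP50")
ax.plot(df["epoch"], df["metrics/mAP50-95(B)"], label="mAP50-95")
ax.set_xlabel("epoch")
ax.set_ylabel("metric value")
ax.legend()
fig.savefig("map_curves.png")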
results.png
ADDED
roboflow_deploy.zip
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:86a9e8749eca7009bddfc9eca26952727861a22358a8c807a124d4a39116cf5e
+size 47726274
state_dict.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18a181cb69ac9720b1b46027690ea1d5ae335562929199b98f260f96088a7aaf
+size 51486567
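state_dict.pt is uploaded twice (weights/state_dict.pt below carries the same LFS hash and size). Assuming it is a plain PyTorch state dict of name-to-tensor pairs, its parameter names can be listed with torch alone; this is an untested sketch, and the file may instead hold a wrapped checkpoint object.

import torch

# Sketch: peek at the exported parameter tensors.
# Assumes state_dict.pt deserializes to a dict of name -> tensor.
sd = torch.load("state_dict.pt", map_location="cpu")
for name, tensor in list(sd.items())[:10]:
    print(name, tuple(tensor.shape))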
train_batch0.jpg
ADDED
train_batch1.jpg
ADDED
train_batch2.jpg
ADDED
train_batch7560.jpg
ADDED
train_batch7561.jpg
ADDED
train_batch7562.jpg
ADDED
val_batch0_labels.jpg
ADDED
val_batch0_pred.jpg
ADDED
val_batch1_labels.jpg
ADDED
val_batch1_pred.jpg
ADDED
val_batch2_labels.jpg
ADDED
val_batch2_pred.jpg
ADDED
weights/best.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90365011becfe117f9f8c8e54884b99022b69301d95ccd0452cc9afd10c24bb7
+size 51594821
weights/model_artifacts.json
ADDED
@@ -0,0 +1 @@
+{"names": ["Eye"], "yaml": {"nc": 1, "backbone": [[-1, 1, "Conv", [64, 3, 2]], [-1, 1, "Conv", [128, 3, 2]], [-1, 1, "RepNCSPELAN4", [256, 128, 64, 1]], [-1, 1, "ADown", [256]], [-1, 1, "RepNCSPELAN4", [512, 256, 128, 1]], [-1, 1, "ADown", [512]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [-1, 1, "ADown", [512]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [-1, 1, "SPPELAN", [512, 256]]], "head": [[-1, 1, "nn.Upsample", ["None", 2, "nearest"]], [[-1, 6], 1, "Concat", [1]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [-1, 1, "nn.Upsample", ["None", 2, "nearest"]], [[-1, 4], 1, "Concat", [1]], [-1, 1, "RepNCSPELAN4", [256, 256, 128, 1]], [-1, 1, "ADown", [256]], [[-1, 12], 1, "Concat", [1]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [-1, 1, "ADown", [512]], [[-1, 9], 1, "Concat", [1]], [-1, 1, "RepNCSPELAN4", [512, 512, 256, 1]], [[15, 18, 21], 1, "Detect", ["nc"]]], "scale": "", "yaml_file": "yolov9c.yaml", "ch": 3}, "nc": 1, "args": {"model": "yolov9c.pt", "batch": 16, "imgsz": 640}, "ultralytics_version": "8.2.87", "model_type": "yolov8"}
weights/roboflow_deploy.zip
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:89daa98f4e0c6d3b5a50c8a1044b3c51a8f4f2494ea500f867c2456cd38ca4ed
+size 47482762
weights/state_dict.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18a181cb69ac9720b1b46027690ea1d5ae335562929199b98f260f96088a7aaf
+size 51486567