# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_22_32 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_23_19 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_23_54 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_24_28 : 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_25_00 : 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_25_34 : 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_32_59 : 1.0

# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_33_39 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_34_43 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_35_25 : 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_39_03 : 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_39_43 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_43_03 : 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_43_55 : 1.0

# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_44_34 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_45_08 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_46_18 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_47_02 : 0.0 => 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_51_25 : 0.0 => 2.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_52_24 : 0.0 => 2.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_53_08 : 0.0 => 3.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_53_46 : 0.0 => 3.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_54_38 : 0.0 => 4.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_55_13 : 0.0 => 4.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_55_56 : 0.0 => 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_56_29 : 0.0 => 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_57_04 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_11_57_46 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_12_01_16 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_12_02_02 : 0.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_12_02_38 : 0.0 => 1.0
# /home_ssd/lhc/hand_detect_v3/2025-10-12_12_03_18 : 0.0


import warnings
warnings.filterwarnings("ignore")
import os
import argparse

import torch.distributed as dist
import torch.nn.functional as F
import torch.optim as optim
import torch.optim.lr_scheduler as lr_scheduler
import yaml

import test  # import test.py to get mAP after each epoch
from models.yolo import Model
from utils.datasets import *
from utils.wiky_datasets_ver3 import *
from utils.utils import *
from tqdm import tqdm

import matplotlib.pyplot as plt
from torch.utils.data import DataLoader

def count_dataset(path='/home_ssd/lhc/hand_detect_v3', data_type=5):
    """Count per-class sample frequencies over the full dual (image + event) dataset.

    Iterates every sample once via a multi-worker DataLoader; the class id is
    read from the image labels when present, otherwise from the event labels.
    Samples with no labels at all are skipped.

    Args:
        path: Root directory of the dataset.
        data_type: Number of distinct class ids (histogram size).

    Returns:
        A list of length ``data_type`` where index ``i`` holds the number of
        samples whose first label has class id ``i``.
    """
    data_count = [0] * data_type

    dataset = BigHandDetectionDataset_Dual_2Label(
        path=path,
        is_train=False,
        event_subdir="event_img_120fps",
        event_needed=-1,
        train_step=1,
    )

    # Wrap in a DataLoader for multi-process prefetching to speed up I/O and
    # preprocessing; batch_size=1 with an identity collate keeps the
    # per-sample logic unchanged.  Kept as a separate name (`loader`) instead
    # of rebinding `dataset`, so both objects stay distinguishable.
    num_workers = 16
    pin_mem = torch.cuda.is_available()
    loader = DataLoader(dataset, batch_size=1, num_workers=num_workers,
                        pin_memory=pin_mem, collate_fn=lambda b: b[0])

    for img, eve, img_labels, eve_labels, name in tqdm(loader):
        if len(img_labels) == 0:  # no image label: fall back to event labels
            if len(eve_labels) == 0:
                continue  # sample has no labels at all
            # Class id sits in column 1 of the label tensor, e.g.
            # tensor([[0.0, 3.0, x, y, w, h]]) -> class 3.
            data_count[int(eve_labels[0, 1])] += 1
        else:
            data_count[int(img_labels[0, 1])] += 1

    return data_count


def count_dataset_2(path='/home_ssd/lhc/hand_detect_v3', data_type=5, sample_index=50):
    """Count per-class frequencies by probing one sample from each sub-recording.

    For every sub-directory of ``path`` a small per-recording dataset is
    built and a single sample (``sample_index``) is inspected; its class id
    is taken from the image label when present, otherwise from the event
    label.  Recordings where that sample has no labels are skipped.

    Args:
        path: Root directory containing one sub-directory per recording.
        data_type: Number of distinct class ids (histogram size).
        sample_index: Which sample of each recording to probe (default 50,
            preserving the original behavior).

    Returns:
        A list of length ``data_type`` of per-class recording counts.
    """
    data_count = [0] * data_type

    for sub in sorted(os.listdir(path)):
        sub_path = os.path.join(path, sub)

        # Skip stray files (e.g. generated plots) that are not recording
        # directories — constructing a dataset on a file would fail.
        if not os.path.isdir(sub_path):
            continue

        dataset = SmallHandDetectionDataset_2Label(
            root_dir=sub_path,
            event_subdir="event_img_120fps",
            event_needed=-1,
            is_train=False,
            train_step=1,
        )

        img, event_img, img_label, eve_label, img_path = dataset[sample_index]

        if len(img_label) == 0:  # no image label: fall back to event label
            if len(eve_label) == 0:
                continue  # probed sample has no labels at all
            # NOTE(review): class id read from column 0 here, while
            # count_dataset uses column 1 — label layouts apparently differ
            # between the two dataset classes; confirm against their code.
            data_count[int(eve_label[0, 0])] += 1
            print(sub_path, ":", eve_label[0, 0])
        else:
            data_count[int(img_label[0, 0])] += 1
            print(sub_path, ":", img_label[0, 0])

        del dataset  # release the per-recording dataset promptly

    return data_count


if __name__ == "__main__":

    path = '/home_ssd/lhc/hand_detect_v3'

    # Count samples per class over the whole dataset.
    counts = count_dataset(path=path)
    print(counts)

    # Visualize the class distribution as a bar chart and save the figure.
    positions = list(range(len(counts)))
    plt.bar(positions, counts)
    plt.savefig(f'{path}_counts.png')

    '''
    [18054, 8804, 12696, 11038, 11076] X
    [12094, 11170, 13894, 12236, 12274] R
    [832, 687, 659, 666, 710]
    [1632, 1937, 1551, 1726, 1471]
    '''