
from os.path import join, split, exists,  isdir,dirname,basename

import os 
import sys 



sys.path.append(os.getcwd())
sys.path.append(join(os.getcwd(),'base_utils'))


# os.chdir(dirname(os.getcwd()))
# sys.path.append(join(os.getcwd(),'base_utils'))

import shutil
from tools.utils import * 
import pdb
from matplotlib import legend
from datasets.loader import *  
from tools.utils import * 
from datasets.loader import *
from datasets.split_with_pidx import *

import online_tracking_results_pb2
from glob import glob 


from PIL import Image
import io

from loguru import logger 

from torch.utils.data import Dataset, DataLoader 



def save_online_tracking_results_pb2_tracks(online_tracking_results_pb2_tracks, save_path):
    """Serialize a protobuf ``Tracks`` message and write it to *save_path* as binary."""
    serialized = online_tracking_results_pb2_tracks.SerializeToString()
    with open(save_path, "wb") as out_file:
        out_file.write(serialized)



def read_save_online_tracking_results_pb2_tracks(file_name):
    """Load a serialized ``Tracks`` protobuf from *file_name* and return the parsed message."""
    with open(file_name, 'rb') as proto_file:
        raw_bytes = proto_file.read()
    parsed = online_tracking_results_pb2.Tracks()
    parsed.ParseFromString(raw_bytes)
    return parsed





def mp_func(src_sv_pb_file):
    """Extract every track's body-patch JPEG from one .pb file to disk.

    NOTE(review): writes into a module-level ``bps_dir``, which is only
    assigned in currently commented-out __main__ code — confirm it is
    defined before wiring this back into ``multiprocess_run``.
    """
    tracks = read_save_online_tracking_results_pb2_tracks(src_sv_pb_file)
    if not tracks.tracks:
        return
    for track in tracks.tracks:
        patch = Image.open(io.BytesIO(track.box_patches))
        patch.save(join(bps_dir, '%s.jpg'%(track.track_id)))



class PBUpdator:
    """Rewrites the staff-score confidence inside serialized ``Tracks`` protobufs.

    Loads every ``*.pb`` under *path* and a ``{track_id: score}`` JSON mapping;
    each call updates one pb file and writes the result into *save_dir*.
    """

    def __init__(self, path, new_task_score_file_name, save_dir):
        # All serialized track files to be updated.
        self.pbs = glob(join(path, '*.pb'))
        # Mapping of track_id -> new staff-score confidence.
        self.new_staff_score = load_json(new_task_score_file_name)

        logger.info('%d  pbs and %d new staff scores loaded' % (len(self.pbs), len(self.new_staff_score)))
        self.save_dir = save_dir
        make_dir(self.save_dir)

    def __len__(self):
        return len(self.pbs)

    def __call__(self, idx):
        """Update all staff scores in the idx-th pb file, save it, and return the message."""
        tracks = read_save_online_tracking_results_pb2_tracks(self.pbs[idx])
        for track in tracks.tracks:
            # Explicit check instead of `assert`: asserts are stripped under -O
            # and would silently let a missing score through. Single dict
            # lookup replaces the previous get()-then-index double lookup.
            new_score = self.new_staff_score.get(track.track_id)
            if new_score is None:
                raise KeyError('no staff score for track_id %s in new staff score dict' % track.track_id)
            track.staff_result.confidence = new_score

        # Persist the updated message under the same file name in save_dir.
        with open(join(self.save_dir, basename(self.pbs[idx])), "wb") as f:
            f.write(tracks.SerializeToString())

        return tracks

        


class PBLoader:
    """Dataset-style loader exposing body-patch images stored inside .pb files.

    Scans *path* for ``*.pb`` files, extracts every track's JPEG body patch
    into memory once, and serves them by index as PIL images.
    """

    def __init__(self, path, transform=None):
        self.pbs = glob(join(path, '*.pb'))
        # track_id -> raw JPEG bytes of the body patch.
        self.bps = self.initbps()
        # BUG FIX: was `self.pbs.keys()`, but self.pbs is a list (glob result)
        # and has no .keys() — the index keys come from the patch dict.
        self.keys = list(self.bps.keys())
        self.transform = transform

    def initbps(self):
        """Read every pb file once and collect ``{track_id: box_patch_bytes}``."""
        bps = {}
        for idx in tqdm(range(len(self.pbs))):
            tracks = read_save_online_tracking_results_pb2_tracks(self.pbs[idx])
            for x in tracks.tracks:
                bps[x.track_id] = x.box_patches
        return bps

    def __len__(self):
        return len(self.keys)

    def getitem(self, idx):
        """Decode and (optionally) transform the idx-th body-patch image."""
        key = self.keys[idx]
        img = Image.open(io.BytesIO(self.bps[key]))
        if self.transform:
            img = self.transform(img)
        return img

    # Expose square-bracket indexing so the loader works with
    # torch.utils.data.DataLoader; existing .getitem() callers are unaffected.
    __getitem__ = getitem

        


if __name__ == "__main__":

    root = '/root/exp/data/qamall_data/full_data/qa-mall-pb-adjustment/CR_zhongshan_wxhpoc_20240401_online_llyue_pb_s/20241113-125902'
    

    # src_sv_pb_files = glob(join(root,'sc_pbs', '*.pb'))
    # bps_dir = join(root,'sc_body_patches' )
    # make_dir(bps_dir)
    # logger.info('results, %d images in total, will be saved at %s, '%(len(src_sv_pb_files), bps_dir))
    
    tgt_dir = join(root,'sc_pbs_staff_score_updated' )
    make_dir(tgt_dir)

    
    # multiprocess_run(mp_func,src_sv_pb_files)
    # loader = PBLoader(join(root, 'pbs'))

    #* update
    """



    """
    
    inference_res_path = "/root/exp/fastreid/logs/mall_fashion/bagtricks_R50#inference#20241114#17-09-1731575394/staff_score.json"

    updator = PBUpdator(join(root, 'sc_pbs'), \
        new_task_score_file_name=  inference_res_path, 
        save_dir = tgt_dir)
    

    
    for idx in tqdm(range(updator.__len__())):
        updator(idx)

    # logger.info('results have  been saved at %s'%(bps_dir))

