from genericpath import isfile
import os 



from os.path import join,dirname, split, exists,isdir

import numpy as np



from os.path import join, split, exists, basename
import os 
import json
from tqdm import tqdm

class JsonExporter:
    """Index a dataset tree and export one JSON line per image.

    Expected layout::

        root/<sample>/k.txt            # comma-separated camera intrinsics
        root/<sample>/<subdir>/*.png   # images (suffix configurable)

    Each record holds the image path relative to the dataset root's parent
    directory ('<root_name>/<sample>/<subdir>/<image>') plus the sample's
    intrinsics as a list of strings.
    """

    def __init__(self, root, save_path, suffix='png'):
        """Scan ``root`` and collect every matching image record.

        Args:
            root: dataset root directory containing one folder per sample.
            save_path: destination path for the JSONL file written by export().
            suffix: only files ending with this suffix are collected.
        """
        self.suffix = suffix
        self.save_path = save_path

        all_samples = []
        for sample_name in os.listdir(root):
            all_samples += self.get_samples(join(root, sample_name))
        self.data = all_samples

    def get_samples(self, path):
        """Collect records for one sample directory.

        Reads ``<path>/k.txt`` (comma-separated intrinsics) and lists every
        image matching ``self.suffix`` inside each immediate subdirectory.

        Returns:
            list of dicts with 'data_path' and 'intrinsic' keys.
        """
        intrinsic = [x.strip(' ') for x in np.loadtxt(join(path, 'k.txt'), dtype=str, delimiter=',')]
        # Keep only the last two path components so the exported paths are
        # relative to the dataset location.  NOTE(review): assumes POSIX
        # separators; would need os.sep handling on Windows — confirm target OS.
        relative_path = '/'.join(path.split('/')[-2:])

        data = []
        for name in os.listdir(path):
            if isdir(join(path, name)):
                data += [
                    {'data_path': join(relative_path, name, x), 'intrinsic': intrinsic}
                    for x in os.listdir(join(path, name))
                    if x.endswith(self.suffix)
                ]
        return data

    def export(self):
        """Write each collected record as one JSON line to ``save_path``."""
        # Uses the module-level ``json`` import; the original redundantly
        # re-imported json inside this method.
        with open(self.save_path, 'w') as f:
            for sample in self.data:
                f.write(json.dumps(sample) + '\n')

class NaiveExporter:
    """Pair conditioning and depth images by sorted filename order and
    dump the pairs as a JSONL index file."""

    def __init__(self, root) -> None:
        """Remember the dataset root and its expected image sub-directories."""
        self.root = root

        self.conditioning_images_path = join(self.root, 'conditioning_images')
        self.normal_images_path = join(self.root, 'normal_images')
        self.depth_images_path = join(self.root, 'depth_images')

    def export_depth(self, export_path):
        """Write one JSON line per (conditioning, depth) filename pair.

        Files are matched positionally after sorting each directory listing,
        so both directories must enumerate in the same relative order.
        """
        with open(export_path, 'w') as sink:
            conditioning_names = sorted(os.listdir(self.conditioning_images_path))
            depth_names = sorted(os.listdir(self.depth_images_path))
            for cond_name, depth_name in tqdm(zip(conditioning_names, depth_names)):
                record = {
                    'conditioning_image': join('conditioning_images', cond_name),
                    'image': join('depth_images', depth_name),
                }
                sink.write(json.dumps(record) + '\n')
            
class XYZJsonlExporter:
    """Build a training JSONL index for the XYZ dataset.

    Expected layout per sample::

        root/<subset>/<sample>/k.txt         # comma-separated intrinsics
        root/<subset>/<sample>/depth/*.png   # target depth maps
        root/<subset>/<sample>/depth/*.npy   # MoGe coarse depth (same stem)
        root/<subset>/<sample>/depth/*.json  # MoGe predicted intrinsics (same stem)
        root/<subset>/<sample>/gray/  (or rgb/)  # conditioning images
        root/<subset>/<sample>/mask/

    One record is produced per depth image, containing the paths of every
    companion file plus the sample intrinsics.
    """

    def __init__(self, root, subset_names):
        """Index every sample of every subset eagerly.

        Args:
            root: dataset root directory.
            subset_names: names of subset folders under ``root`` to index.
        """
        self.root = root
        self.subset_names = subset_names
        # Eagerly collect all records so __call__ only has to serialize them.
        self.jsonl_list = self.load_all()

    def load_one_sample(self, path):
        """Collect per-image paths for one sample directory.

        Returns:
            (depth_list, gray_list, mask_list, intrinsic) where the three
            lists are index-aligned and intrinsic is a list of strings.
        """
        intrinsic = [x.strip(' ') for x in np.loadtxt(join(path, 'k.txt'), dtype=str, delimiter=',')]

        depth_list = []
        gray_list = []
        mask_list = []
        for image_name in os.listdir(join(path, 'depth')):
            if not image_name.endswith('.png'):
                continue

            depth_path = join(path, 'depth', image_name)
            depth_list.append(depth_path)

            # BUGFIX: build sibling paths by joining components instead of
            # str.replace on the full path — replace() rewrites EVERY
            # occurrence, corrupting the path whenever 'depth' appears in the
            # root directory or the file name.
            gray_path = join(path, 'gray', image_name)
            if not exists(gray_path):
                # Some samples ship 'rgb' instead of 'gray' conditioning images.
                gray_path = join(path, 'rgb', image_name)
            gray_list.append(gray_path)

            # Mask existence is not checked (matches previous behavior).
            mask_list.append(join(path, 'mask', image_name))

        return depth_list, gray_list, mask_list, intrinsic

    def load_one_sample2(self, path):
        """Like :meth:`load_one_sample`, but also collect the MoGe outputs
        (coarse depth ``.npy`` and predicted-intrinsic ``.json``) stored next
        to each depth png.

        Raises:
            AssertionError: if a coarse depth or predicted-intrinsic file is
                missing for any depth image.
        """
        depth_list, gray_list, mask_list, intrinsic = self.load_one_sample(path)

        coarse_depth_list = []
        predict_intrinsic_list = []
        for depth_path in depth_list:
            # BUGFIX: swap only the extension; the old replace('png', ...)
            # also mangled any 'png' substring elsewhere in the path.
            stem = os.path.splitext(depth_path)[0]
            coarse_depth = stem + '.npy'
            pred_intrinsic = stem + '.json'
            assert exists(coarse_depth), f"{coarse_depth} does not exist"
            assert exists(pred_intrinsic), f"{pred_intrinsic} does not exist"
            coarse_depth_list.append(coarse_depth)
            predict_intrinsic_list.append(pred_intrinsic)

        return depth_list, gray_list, mask_list, intrinsic, coarse_depth_list, predict_intrinsic_list

    def load_subset(self, subset_name):
        """Collect one record per depth image across every sample directory
        of ``subset_name``."""
        jsonl_list = []
        for sample_name in os.listdir(join(self.root, subset_name)):
            path = join(self.root, subset_name, sample_name)
            if not isdir(path):
                # Skip stray files at the subset level.
                continue

            depth_list, gray_list, mask_list, intrinsic, coarse_depth, predict_intrinsic = self.load_one_sample2(path)
            for image, cond, mask, coarse, pred in zip(depth_list, gray_list, mask_list, coarse_depth, predict_intrinsic):
                jsonl_list.append({
                    'conditioning_image': cond,
                    'image': image,
                    'mask': mask,
                    'coarse_depth': coarse,
                    'pred_intrinsic': pred,
                    'intrinsic': intrinsic,
                })
        return jsonl_list

    def load_all(self):
        """Concatenate records from every configured subset, logging counts."""
        jsonls = []
        for subset in self.subset_names:
            subset_jsonl = self.load_subset(subset)
            jsonls += subset_jsonl
            print(f'there are {len(subset_jsonl)} in {subset}')
        return jsonls

    def __call__(self, save_path=None):
        """Serialize all collected records as JSONL.

        Args:
            save_path: output path; defaults to ``<root>/train.jsonl``.
        """
        if save_path is None:
            save_path = join(self.root, 'train.jsonl')

        with open(save_path, 'w') as f:
            for item in tqdm(self.jsonl_list):
                f.write(json.dumps(item) + '\n')

        print(f"Saved to {save_path}")



        
if __name__ =="__main__":
    # NOTE(review): the block below is dead code "commented out" as a bare
    # string literal (mind the four quotes: the string's first character is
    # a quote).  It records a previous NaiveExporter run.
    """"
    


    # src ='/share/project/cwm/shaocong.xu/exp/ml-depth-pro/data/cleargrasp_processed'
    src ='/share/project/cwm/shaocong.xu/exp/Lotus/data/cleargrasp-dataset-test-val_processed_synthetic_val'


    exporter = NaiveExporter(src)
    exporter.export_depth(f"{src}/test.jsonl")


    """

    # Active path: index the tricky_nogt_2025 test set and write its JSONL.
    exporter = JsonExporter('/share/project/cwm/shaocong.xu/exp/GeometryCrafter/data/tricky_nogt_2025_test/testset',
                            save_path = '/share/project/cwm/shaocong.xu/exp/GeometryCrafter/data/tricky_nogt_2025_test/test.jsonl')

    exporter.export()


    # Everything below exit(0) is unreachable — kept from a previous XYZ
    # dataset export run.
    exit(0)
    

    #* XYZ exporter 
    import numpy as np  

    xyz_path = '/share/project/cwm/shaocong.xu/exp/Lotus/data/XYZ'

    from os.path import isdir

    exporter = XYZJsonlExporter(xyz_path,subset_names=['real', 'tricky_synthetic'])

    exporter()

