'''
Transfer grasp points by nearest-neighbor searching.
'''
import argparse, os, yaml, sys, pickle
import numpy as np
import torch
import time
import trimesh
sys.path.append('./')
sys.path.append('./dif')
from lib.fk.FK_layer import FK_layer
from dif.dif_net import DeformedImplicitField
from shape_encoder.model import ShapeEncoder
from shape_encoder.data import PartPointsDatset
from dif.sdf_meshing import create_mesh


# Command-line options for the grasp-point transfer script.
parser = argparse.ArgumentParser()
parser.add_argument('--data_root', type=str, default='datasets')      # dataset root directory
parser.add_argument('--category', type=str, default='mug')            # object category name
parser.add_argument('--mode', type=str, default='eval')               # dataset split to process
parser.add_argument('--num_points', type=int, default=1024)           # points per partial cloud
parser.add_argument('--shape_model_path', type=str, default='')       # '' -> per-category default checkpoint
parser.add_argument('--resolution', type=int, default=64)             # query-grid / marching-cubes resolution
parser.add_argument('--batch_size', type=int, default=4)
parser.add_argument('--grasp_id', type=str, default='0')              # id of the labelled grasp to transfer
parser.add_argument('--gt_code', action='store_true', default=False)  # use ground-truth latent codes
parser.add_argument('--vis', action='store_true', default=False)      # visualise only, skip saving results
opt = parser.parse_args()


def transfer_grasp_points_to_Template_Field(category, dif_model, grasp_id=1):
    """Map a labelled grasp on the source instance into DIF template space.

    Loads the grasp label of the category's source instance, runs the hand
    forward-kinematics layer to get hand points, warps those points into the
    category template field, and splits them into contact / non-contact sets
    by the template SDF value.

    Args:
        category: object category name (key into cfg/class_cfg.yml).
        dif_model: DeformedImplicitField exposing get_template_coords and
            get_template_field.
        grasp_id: id suffix of the grasp label file to load.

    Returns:
        (contact_pts_template, label): template-space contact points of shape
        [1, n_contact, 3], and the raw npz grasp label.

    Side effects: saves contact / non-contact point indices next to the
    grasp label file (``..._contact_info.npz``).
    """
    with open('cfg/class_cfg.yml', 'r') as stream:
        class_cfg = yaml.safe_load(stream)

    src_inst = class_cfg[category]['src_inst_name']

    label_file_path = f"grasp_label/{category}/train/{src_inst}/0/label_grasp_{grasp_id}.npz"
    label = np.load(label_file_path)
    palm_q = label['palm_q'][[3, 0, 1, 2]]      # [x,y,z,w] -> [w,x,y,z]
    base = np.concatenate([label['palm_t'], palm_q], axis=0)
    rotations = label['joints'][2:24]
    rotations = torch.FloatTensor(rotations).reshape(1, -1).cuda()
    base = torch.FloatTensor(base).reshape(1, -1).cuda()
    model = FK_layer(base, rotations)
    model.to('cuda')
    _, hand_pts = model()  # key points are not needed here
    # Undo the label's object scaling so the hand points live in the
    # normalised object frame the DIF model expects -- TODO confirm factor.
    hand_pts /= 0.5 * label["obj_scale"]

    # get dif's code of source object
    with open(os.path.join(opt.data_root, 'gt_codes', '{0}_{1}.pkl'.format(category, 'train')), 'rb') as f:
        codes = pickle.load(f)
    code = torch.from_numpy(codes[f'{src_inst}/0']).cuda()

    hand_pts_template = dif_model.get_template_coords(hand_pts, code)  # [1, hand pts num, 3]
    hand_pts_sdf = dif_model.get_template_field(hand_pts_template)     # [1, hand pts num, 1]

    # Keep only points inside the unit sphere; among those, points with a
    # near-zero template SDF are contacts, clearly positive SDF are
    # non-contacts (points in between are ignored).
    inner_mask = torch.norm(hand_pts_template, dim=2) < 0.99
    contact_mask = (torch.abs(hand_pts_sdf[:, :, 0]) < 1e-3) & inner_mask
    non_contact_mask = (hand_pts_sdf[:, :, 0] > 1e-1) & inner_mask

    # Boolean masks -> per-point indices (batch dim is 1, so take dim-1 indices).
    contact_index = torch.where(contact_mask)[1]
    non_contact_index = torch.where(non_contact_mask)[1]
    contact_pts_template = hand_pts_template[:, contact_index]
    print(contact_pts_template.size())

    contact_index = contact_index.detach().cpu().numpy()
    non_contact_index = non_contact_index.detach().cpu().numpy()

    contact_info_file_path = f"grasp_label/{category}/train/{src_inst}/0/label_grasp_{grasp_id}_contact_info"
    np.savez(contact_info_file_path, contact_idx=contact_index, non_contact_idx=non_contact_index)
    return contact_pts_template, label

def get_query_points(N=64):
    """Return an (N**3, 3) float tensor of grid points covering [-1, 1]^3.

    Points follow the DeepSDF flattening convention: the z coordinate
    (column 2) varies fastest, x (column 0) slowest.
    """
    step = 2.0 / (N - 1)
    flat = torch.arange(N ** 3)
    pts = torch.zeros(N ** 3, 3)
    # Decode the flat index into per-axis grid indices, then map each
    # axis from [0, N-1] onto the [-1, 1] interval.
    pts[:, 2] = (flat % N) * step - 1.0
    pts[:, 1] = ((flat // N) % N) * step - 1.0
    pts[:, 0] = ((flat // (N * N)) % N) * step - 1.0
    pts.requires_grad = False  # pure query coordinates, never optimised
    return pts

if __name__ == '__main__':
    print(opt)
    opt.dif_config = 'dif/configs/generate/{0}.yml'.format(opt.category)

    dataset = PartPointsDatset(opt.data_root, opt.category, opt.mode, opt.num_points)
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=opt.batch_size, shuffle=False, num_workers=8, pin_memory=True)

    # The shape encoder predicts latent codes from partial point clouds;
    # it is only needed when ground-truth codes are not used.
    if not opt.gt_code:
        shape_encoder = ShapeEncoder()
        if opt.shape_model_path == '':
            opt.shape_model_path = f'shape_encoder/output/{opt.category}/checkpoints/model_best.pth'

        shape_encoder.load_state_dict(torch.load(opt.shape_model_path))
        shape_encoder.eval()
        shape_encoder.cuda()

    with open(os.path.join(opt.dif_config), 'r') as stream:
        meta_params = yaml.safe_load(stream)
    dif_model = DeformedImplicitField(**meta_params)
    dif_model.load_state_dict(torch.load(meta_params['checkpoint_path']))
    dif_model.cuda()

    # transfer grasp points from labeled model to template field
    g_pts_temp, label = transfer_grasp_points_to_Template_Field(opt.category, dif_model, opt.grasp_id)
    num_grasp_pts = g_pts_temp.size()[1]
    # Output buffers, fully overwritten for every instance in the loop below.
    g_pts_inst = np.zeros([num_grasp_pts, 3])
    g_pts_inst_weight = np.zeros([num_grasp_pts, 1])

    q_pts = get_query_points(opt.resolution).cuda()[None, ...]

    for obj_pcs, gt_codes, file_names, numbers in dataloader:
        print(file_names)
        cuda_obj_pcs = obj_pcs.cuda()
        cuda_gt_codes = gt_codes.cuda()
        if opt.gt_code:
            cuda_pred_codes = cuda_gt_codes
        else:
            cuda_pred_codes = shape_encoder(cuda_obj_pcs)

        for i in range(obj_pcs.size()[0]):  # batch size
            save_dir = os.path.join("grasp_data", opt.category, opt.mode, file_names[i])
            os.makedirs(save_dir, exist_ok=True)

            if opt.gt_code:
                # With ground-truth codes, sample query points directly from
                # the object's mesh surface.
                mesh = trimesh.load(f"{opt.data_root}/obj/{opt.category}/{opt.mode}/{file_names[i]}.ply")
                mesh_pts, _ = trimesh.sample.sample_surface_even(mesh, 10000)
                mesh_pts *= 2  # meshes appear to be stored at half scale -- TODO confirm
            else:
                # Otherwise reconstruct the surface from the predicted code.
                mesh_pts = create_mesh(dif_model, filename='', embedding=cuda_pred_codes[i], N=opt.resolution, get_color=False)

            # TODO: query points be free points or surface points ?
            # Currently surface points always override the dense grid above.
            if True:
                q_pts = torch.FloatTensor(mesh_pts).cuda()[None, ...]

            q_pts_dfm = dif_model.get_template_coords(q_pts, cuda_pred_codes[i])
            print("grasp points on template size is :    {}.".format(g_pts_temp.size()))
            print("deformed points on instance size is : {}.".format(q_pts_dfm.size()))

            start_time = time.time()
            # Nearest-neighbour transfer: for each template-space grasp point,
            # pick the instance point whose template-space image is closest,
            # and weight it by exp(-distance).
            for j in range(num_grasp_pts):
                dist = torch.norm(q_pts_dfm - g_pts_temp[0, j, :], dim=2)
                index = torch.argmin(dist, dim=1)  # reuse dist instead of recomputing the norm
                g_pts_inst[j, :] = q_pts[0, index, :].detach().cpu().numpy()
                g_pts_inst_weight[j, :] = torch.exp(-dist[0, index]).detach().cpu().numpy()

            print("transfer one grasp on an instance used {} seconds.".format(time.time() - start_time))

            if not opt.vis:
                code = cuda_pred_codes[i].detach().cpu().numpy()
                gt_sRT = np.load(os.path.join(opt.data_root, 'render_pc', opt.category, opt.mode, file_names[i], 'PC_cam_sRT_{0}.npz'.format(numbers[i])))
                obj_scale = gt_sRT['scale']
                np.savez(os.path.join(save_dir, "grasp_{}_points_on_surface".format(opt.grasp_id)), grasp_points=g_pts_inst, obj_scale=obj_scale, code=code, grasp_points_weight=g_pts_inst_weight)
                # direct mapping
                np.savez(os.path.join(save_dir, "dm_grasp_{}".format(opt.grasp_id)), q=label["q"], t=label["t"], palm_q=label["palm_q"], palm_t=label["palm_t"], joints=label["joints"], obj_scale=obj_scale)