import torch
import os, sys, argparse, yaml, re
import numpy as np
import trimesh
sys.path.append('./')
from lib.fk.vis_hand_mesh_trimesh_ver import get_hand_mesh_under_grasp_pose
from tools.utils import trimesh_scene_to_mesh
from lib.fk.FK_layer import FK_layer

def parse_idx_range(s):
    """Parse an instance-index CLI argument.

    Accepts either a comma separated list of numbers 'a,b,c' or an
    inclusive range 'a-b', and returns the indices as a list of ints.
    """
    match = re.fullmatch(r'(\d+)-(\d+)', s)
    if match is not None:
        lo, hi = (int(g) for g in match.groups())
        return list(range(lo, hi + 1))
    return [int(tok) for tok in s.split(',')]

# Command-line options selecting which category/instance/grasp to visualize.
parser = argparse.ArgumentParser()
for _flag, _default in [('data_root', 'datasets'),
                        ('category', 'mug'),
                        ('mode', 'train'),
                        ('exp_name', 'label'),
                        ('grasp_id', '3')]:
    parser.add_argument(f'--{_flag}', type=str, default=_default)
# inst_id accepts 'a,b,c' or 'a-b' and is converted to a list of ints.
parser.add_argument('--inst_id', type=parse_idx_range, default='0')
opt = parser.parse_args()

def set_color(mesh: trimesh.Trimesh, hand_pc, inst_path, mode='train'):
    """Build a colored point cloud highlighting grasp-related points.

    Args:
        mesh: object mesh. Currently unused; kept for interface
            compatibility with existing callers.
        hand_pc: (N, 3) array of hand surface points under the grasp pose.
        inst_path: instance sub-path under 'grasp_data/' to load labels from.
        mode: 'train' -> blue contact points selected from `hand_pc` by the
            stored contact indices; 'eval' -> red grasp points read from the
            eval surface-points file.

    Returns:
        trimesh.PointCloud of the highlighted points.

    Raises:
        ValueError: if `mode` is neither 'train' nor 'eval'. (Previously an
        unknown mode fell through and raised UnboundLocalError on `pc`.)
    """
    if mode == 'train':
        d = np.load(f'grasp_data/{inst_path}/label_grasp_{opt.grasp_id}_contact_info.npz')
        # Blue: hand points marked as contacts in the label file.
        pc = trimesh.PointCloud(hand_pc[d['contact_idx']], colors=np.array([0, 0, 255, 255]))
    elif mode == 'eval':
        d = np.load(f'grasp_data/{inst_path}/grasp_{opt.grasp_id}_points_on_surface.npz')
        scale = d['obj_scale']
        # NOTE(review): the 0.5 factor presumably matches the normalization
        # used when the eval points were generated — confirm against the
        # data-generation pipeline.
        grasp_pts = d["grasp_points"] * scale * 0.5
        # Red: predicted grasp points on the object surface.
        pc = trimesh.PointCloud(grasp_pts, colors=np.array([255, 0, 0, 255]))
    else:
        raise ValueError(f"unknown mode: {mode!r} (expected 'train' or 'eval')")
    return pc
    
    
def show_grasp(inst_path, grasp_pose):
    """Build a list of trimesh geometries visualizing one grasp.

    Loads the object mesh for `inst_path`, runs the hand forward-kinematics
    layer (on CUDA) for the given grasp pose, and collects the object mesh,
    contact/grasp point clouds, and the posed hand meshes into one list
    suitable for trimesh.Scene.

    Args:
        inst_path: instance sub-path like 'category/mode/inst[/idx]'.
        grasp_pose: npz mapping with at least 'palm_q' (quaternion, [x,y,z,w]),
            'palm_t' (translation), and 'joints' — TODO confirm full schema
            against the grasp-data generation code.

    Returns:
        List of trimesh geometries (mesh, point clouds, hand meshes).
    """
    palm_q = grasp_pose['palm_q'][[3,0,1,2]]       # [x,y,z,w] -> [w,x,y,z]
    # Root pose fed to the FK layer: translation followed by wxyz quaternion.
    base = np.concatenate([grasp_pose['palm_t'], palm_q], axis=0)
    # Joint angles; indices 2:24 presumably skip two non-actuated root
    # entries — TODO confirm against FK_layer's expected joint layout.
    rotations = grasp_pose['joints'][2:24]
    rotations_cuda = torch.FloatTensor(rotations).reshape(1, -1).cuda()
    base = torch.FloatTensor(base).reshape(1, -1).cuda()

    s = 1.0
    # Prefer the aligned ShapeNet mesh if present; fall back to the raw ply.
    tmp_path = inst_path.replace('eval_pc', 'eval')[:-2]
    if os.path.exists(f'assets/urdf/shapenet/{tmp_path}/align_ds/model_align.obj'):
        mesh_path = f'assets/urdf/shapenet/{tmp_path}/align_ds/model_align.obj'
    else:
        mesh_path = f"datasets/obj/{inst_path}.ply"
        if opt.category == 'mug' and opt.mode == 'train':
            # HACK: hardcoded substitute mesh for the mug/train case —
            # looks like leftover debugging; verify before relying on it.
            # mesh_path = '/media/wind/Share/Projects/STransGrasp/dif/recon/mug_1106/train/test0513.ply'
            # s = 0.5
            mesh_path ='datasets/obj/mug/eval/e6dedae946ff5265a95fb60c110b25aa/4.ply'
    print(mesh_path)
    mesh = trimesh.load(mesh_path)
    # print(grasp_pose['obj_scale'])
    # mesh.apply_scale(grasp_pose['obj_scale']*s)
    # NOTE(review): fixed scale overrides the stored obj_scale (commented
    # out above) — presumably tuned for one specific instance; confirm.
    mesh.apply_scale(0.24936792593502233)
    # meshcolors = np.array([86, 180, 223, 255])  # muted blue
    # meshcolors = np.array([0xa8, 0x82, 0xf6, 200])  # purple
    meshcolors = np.array([200,200,200,150])  # semi-transparent gray
    if isinstance(mesh, trimesh.Scene):
        # Some loaders return a Scene; flatten it to a single mesh.
        mesh = trimesh_scene_to_mesh(mesh)
    mesh.visual.face_colors = meshcolors
    # mesh.visual.face_colors[:, 3] = 200
    
    '''get point cloud of robot hand under grasp pose'''
    # rotations_cuda = torch.zeros_like(rotations_cuda).cuda()
    model = FK_layer(base, rotations_cuda)
    model.to('cuda')
    # positions: hand keypoints; transformed_pts: hand surface point cloud
    # (batch of 1 — take element 0).
    positions, transformed_pts = model()
    positions = positions[0].detach().cpu().numpy()
    transformed_pts = transformed_pts[0].detach().cpu().numpy()

    show_list = []
    show_list.append(mesh)
    mesh_pts, _ = trimesh.sample.sample_surface_even(mesh, 10000)
    ## object points
    # show_list.append(trimesh.PointCloud(mesh_pts))
    ## contact points
    # NOTE(review): temporary +0.05 x-shift applied only while coloring the
    # train contact points, then undone — presumably to offset the train
    # visualization from the eval one; confirm intent.
    transformed_pts[:,0] += 0.05 
    show_list.append(set_color(mesh, transformed_pts, inst_path))
    transformed_pts[:,0] -= 0.05 
    # Hardcoded eval instance path — looks like leftover debugging; verify.
    show_list.append(set_color(mesh, transformed_pts, 'mug/eval/e6dedae946ff5265a95fb60c110b25aa/4', 'eval'))
    ## hand pc
    hand_pc = trimesh.PointCloud(transformed_pts)
    # show_list.append(hand_pc)
    ## hand keypoints
    hand_key_points = trimesh.PointCloud(positions, colors=np.array([255,0,0,255]))
    # show_list.append(hand_key_points)
    '''get mesh of robot hand under grasp pose'''
    # Hand-mesh helper expects quaternion-then-translation ordering.
    root_qt = np.concatenate([palm_q, grasp_pose['palm_t']], axis=0)
    # rotations = np.zeros_like(rotations)
    for i in get_hand_mesh_under_grasp_pose(rotations, root_qt, np.array([0xff, 0xff, 0xff, 180])):
        t=np.eye(4)
        # Same +0.05 x-shift as the contact points so hand and contacts line up.
        t[0,3] = 0.05
        i.apply_transform(t)
        show_list.append(i)
    
    return show_list
    
if __name__ == "__main__":
    if opt.mode == 'train':
        with open('cfg/class_cfg.yml', 'r') as stream:
            cls_cfg = yaml.safe_load(stream)    
        inst_name = cls_cfg[opt.category]['src_inst_name']
        inst_path = os.path.join(opt.category, opt.mode, inst_name, '0')
        grasp_file = "grasp_data/{}/{}_grasp_{}.npz".format(inst_path, opt.exp_name, opt.grasp_id)
        grasp_pose = np.load(grasp_file)
        show_list = show_grasp(inst_path, grasp_pose)
        trimesh.Scene(show_list).show()
    else:
        eval_inst_list = []
        with open(f'dif/split/eval/{opt.category}.txt', 'r') as f:
            data = f.readline().rstrip('\n')
            while data != '':
                eval_inst_list.append(data)
                data = f.readline().rstrip('\n')   
        # for idx in opt.inst_id:
        #     inst = eval_inst_list[idx]
        #     inst_path = os.path.join(opt.category, opt.mode, inst)
        #     grasp_file = "grasp_data/{}/{}_grasp_{}.npz".format(inst_path, opt.exp_name, opt.grasp_id)
        #     grasp_pose = np.load(grasp_file)
        #     show_list = show_grasp(inst_path, grasp_pose)
        
        # y_offset = [0.4, 0.2, 0, -0.2]
        y_offset = [0.0]
        for idx in opt.inst_id:
            i = 0
            merge_show_list = []
            # for exp in ['dm', 'trans', 'ct', 'all']:
            for exp in ['all']:
                inst = eval_inst_list[idx]
                inst_path = os.path.join(opt.category, opt.mode, inst)
                grasp_file = "grasp_data/{}/{}_grasp_{}.npz".format(inst_path, exp, opt.grasp_id)
                grasp_pose = np.load(grasp_file)
                show_list = show_grasp(inst_path, grasp_pose)
                for o in show_list:
                    o.vertices[:, 1] += y_offset[i]
                    merge_show_list.append(o)
                i += 1
            trimesh.Scene(merge_show_list).show()