'''
Author: Yan Jianhang
Date: 2022-04-12 15:53:32
LastEditors: Yan Jianhang
LastEditTime: 2022-05-03 19:20:28
FilePath: /STransGrasp/lib/fk/vis_grasp.py
Description: FK layer demo & grasp visualization
'''
import torch
import os, sys, argparse, yaml, re
import numpy as np
import open3d as o3d
import open3d.visualization as vis
sys.path.append('./')
from lib.fk.FK_layer import FK_layer
from lib.fk.utils import o3d_read_mesh
from lib.fk.vis_hand_mesh import get_hand_mesh_under_grasp_pose

def parse_idx_range(s):
    '''Parse an index-selection string into a list of ints.

    Accepts either a comma-separated list 'a,b,c' or an inclusive range
    'a-b'. An empty or whitespace-only string yields [] — this matters
    because argparse applies ``type=`` to string defaults, and the
    ``--inst_id`` default of '' would otherwise crash on ``int('')``.
    '''
    s = s.strip()
    if not s:
        # Nothing selected (e.g. the argparse default '').
        return []
    m = re.match(r'^(\d+)-(\d+)$', s)
    if m:
        lo, hi = int(m.group(1)), int(m.group(2))
        return list(range(lo, hi + 1))
    return [int(x) for x in s.split(',')]

# Command-line options for this demo; parsed at import time, so importing
# this module as a library would also trigger argument parsing.
parser = argparse.ArgumentParser()
parser.add_argument('--data_root', type=str, default='datasets')  # dataset root directory (unused in this file's visible code)
parser.add_argument('--category', type=str, default='mug')        # object category name
parser.add_argument('--mode', type=str, default='train')          # 'train' uses the configured source instance; anything else walks the eval split
parser.add_argument('--exp_name', type=str, default='label')      # prefix of the grasp .npz file name
parser.add_argument('--grasp_id', type=str, default='3')          # grasp index embedded in the file name
# Indices into the eval instance list, as 'a,b,c' or an inclusive range 'a-b'.
# NOTE(review): argparse applies type= to string defaults, so the default ''
# is passed through parse_idx_range — verify it tolerates an empty string.
parser.add_argument('--inst_id', type=parse_idx_range, default='')
opt = parser.parse_args()

def show_grasp(inst_path, grasp_pose):
    '''Render the object mesh together with the robot hand posed by a grasp.

    inst_path:  relative instance path used to locate the object mesh
                ('eval_pc' segments are mapped to 'eval').
    grasp_pose: npz-style mapping with 'palm_q' ([x,y,z,w] quaternion),
                'palm_t' (palm translation), 'joints', and 'obj_scale'.
    '''
    # Reorder the palm quaternion from [x, y, z, w] to [w, x, y, z].
    quat_wxyz = grasp_pose['palm_q'][[3, 0, 1, 2]]
    root_pose = np.concatenate([grasp_pose['palm_t'], quat_wxyz], axis=0)
    joint_rotations = grasp_pose['joints'][2:24]
    # Tensors for the (currently disabled) FK-layer point-cloud path below.
    rotations_cuda = torch.FloatTensor(joint_rotations).reshape(1, -1).cuda()
    base = torch.FloatTensor(root_pose).reshape(1, -1).cuda()

    mesh_path = "datasets/obj/{}.ply".format(inst_path.replace('eval_pc', 'eval'))
    obj_mesh = o3d_read_mesh(mesh_path, scale=grasp_pose['obj_scale'])
    obj_mesh.paint_uniform_color([128/255, 128/255, 128/255])

    geometries = [obj_mesh]
    '''get point cloud of robot hand under grasp pose'''
    # model = FK_layer(base, rotations_cuda)
    # model.to('cuda')
    # positions, transformed_pts = model()
    # pcd = o3d.geometry.PointCloud()
    # transformed_pts = transformed_pts[0].detach().cpu().numpy()
    # pcd.points = o3d.utility.Vector3dVector(transformed_pts)
    # geometries.append(pcd)
    '''get mesh of robot hand under grasp pose'''
    hand_root_qt = np.concatenate([quat_wxyz, grasp_pose['palm_t']], axis=0)
    for part in get_hand_mesh_under_grasp_pose(joint_rotations, hand_root_qt):
        geometries.append(part)
    vis.draw(geometries, raw_mode=False, show_skybox=False)
    
if __name__ == "__main__":
    if opt.mode == 'train':
        # Training mode: visualize the grasp on the single source instance
        # configured for this category in the class config.
        with open('cfg/class_cfg.yml', 'r') as stream:
            cls_cfg = yaml.safe_load(stream)
        inst_name = cls_cfg[opt.category]['src_inst_name']
        inst_path = os.path.join(opt.category, opt.mode, inst_name, '0')
        grasp_file = "grasp_data/{}/{}_grasp_{}.npz".format(inst_path, opt.exp_name, opt.grasp_id)
        grasp_pose = np.load(grasp_file)
        show_grasp(inst_path, grasp_pose)
    else:
        # Eval mode: visualize grasps for the instances selected by --inst_id.
        # Read every non-empty line of the split file; the previous
        # readline() loop stopped at the first blank line, silently
        # truncating the instance list.
        with open(f'dif/split/eval/{opt.category}.txt', 'r') as f:
            eval_inst_list = [line.rstrip('\n') for line in f if line.rstrip('\n')]
        for idx in opt.inst_id:
            inst = eval_inst_list[idx]
            inst_path = os.path.join(opt.category, opt.mode, inst)
            grasp_file = "grasp_data/{}/{}_grasp_{}.npz".format(inst_path, opt.exp_name, opt.grasp_id)
            grasp_pose = np.load(grasp_file)
            show_grasp(inst_path, grasp_pose)