import os
import time
import torch
import numpy as np
import torch.nn as nn
from utils.pointnet2_utils import gather_operation, furthest_point_sample
from common.lib.metric import to_python_float, reduce_tensor, AverageMeter, accuracy
# from torchviz import make_dot


def train(train_loader, model, optimizer, scheduler, epoch, config, logger):
    """Run one training epoch over ``train_loader``.

    Per batch: optionally FPS-subsample the point cloud and its per-point
    labels, run the model forward, backprop ``total_loss``, step the
    optimizer, and accumulate running-average metrics.  Progress is logged
    every ``config.print_freq`` iterations on rank 0.

    Args:
        train_loader: iterable yielding ``(data, label, dataset_idx)`` batches.
        model: network; ``model.module`` is accessed when
            ``config.fix_back_bone`` is set, so it is assumed to be wrapped
            (e.g. DataParallel / DistributedDataParallel).  Called as
            ``model(data, **kwargs)`` and must return a dict containing
            ``'total_loss'`` plus the per-stage losses read below.
        optimizer: optimizer stepped once per batch.
        scheduler: LR scheduler, queried only via ``get_lr()`` for logging.
        epoch: current epoch index (used in log messages only).
        config: run configuration; flags used here: ``fix_back_bone``,
            ``train_grasp``, ``train_grasp_approach``, ``train_two_stage``,
            ``train_suction``, ``non_uniform_sampling``, ``num_points``,
            ``sample_rate``, ``use_focalloss``, ``distributed``,
            ``print_freq``, ``rank``.
        logger: logger for progress output.

    Returns:
        ``top1.avg`` when ``config.use_focalloss`` is False; otherwise the
        tuple ``(cls_loss.avg, grasp_pose_loss.avg, normal_loss.avg,
        score_loss.avg, distance_loss.avg)``.

    Raises:
        ValueError: if ``config.train_grasp`` is set without either
            ``train_grasp_approach`` or ``train_two_stage`` (the original
            code crashed here with an uninformative NameError).
    """
    # Per-epoch running averages for every tracked loss / metric.
    stage_one_cls_loss = AverageMeter()
    stage_one_score_loss = AverageMeter()
    stage_one_grasp_pose_loss = AverageMeter()
    stage_one_normal_loss = AverageMeter()
    stage_one_distance_loss = AverageMeter()
    stage_one_suction_pose_loss = AverageMeter()
    stage_two_cls_loss = AverageMeter()
    top1 = AverageMeter()

    def _reduced(batch_dict, key):
        # All-reduce a scalar loss across ranks when distributed; otherwise
        # use the local value directly.  Bug fix: the original
        # non-distributed path never assigned the score loss (NameError) and
        # still called reduce_tensor on the grasp/normal/distance losses.
        value = batch_dict[key].data
        return reduce_tensor(value) if config.distributed else value

    def _log_pose_losses():
        # Per-task pose-loss lines, identical in both logging branches below.
        if config.train_grasp:
            logger.info('S-1 Poseloss: {stage1_grasp_pose_loss.avg:.6f}'.format(
                stage1_grasp_pose_loss=stage_one_grasp_pose_loss))
            if config.train_grasp_approach:
                logger.info('S-1 Normal loss: {stage1_normal_loss.avg:.6f}'.format(
                    stage1_normal_loss=stage_one_normal_loss))
                logger.info('S-1 Distance loss: {stage1_distance_loss.avg:.6f}'.format(
                    stage1_distance_loss=stage_one_distance_loss))
        if config.train_suction:
            logger.info('S-1 Poseloss: {stage1_suction_pose_loss.avg:.6f}'.format(
                stage1_suction_pose_loss=stage_one_suction_pose_loss))

    n_iteration_per_epoch = len(train_loader)
    if config.fix_back_bone:
        # Freeze backbone statistics (eval mode) and train only the ROI head.
        model.module.back_bone.eval()
        model.module.roi_head.train()
    else:
        model.train()

    end = time.time()
    for i, batch in enumerate(train_loader):
        data_time = time.time() - end
        net_time = time.time()
        data, label, dataset_idx = batch
        data = data.cuda(non_blocking=True)
        cls_label = label[0].cuda(non_blocking=True)
        score_label = label[1].cuda(non_blocking=True)
        pose_label = label[2].cuda(non_blocking=True)
        if config.train_grasp_approach:
            normals_label = label[4].cuda(non_blocking=True)
            distance_label = label[5].cuda(non_blocking=True)
        if config.train_two_stage or config.train_grasp_approach:
            matrix_label = label[3].cuda(non_blocking=True)
        if not config.non_uniform_sampling:
            # Subsample the cloud with farthest-point sampling and gather the
            # matching per-point labels.  NOTE(review): score_label is NOT
            # subsampled here -- confirm the model indexes it consistently.
            idx = furthest_point_sample(data, int(config.num_points / config.sample_rate))
            data = gather_operation(
                data.transpose(1, 2).contiguous(), idx
                ).transpose(1, 2).contiguous()
            cls_label = gather_operation(
                cls_label.float().unsqueeze(dim=2).transpose(1, 2).contiguous(),
                idx).squeeze().long().contiguous()
            pose_label = gather_operation(
                pose_label.transpose(1, 2).contiguous(),
                idx).transpose(1, 2).contiguous()
        optimizer.zero_grad()

        # Assemble the label kwargs the model expects for the active task.
        if config.train_grasp:
            if config.train_grasp_approach:
                kwargs = {'cls_label': cls_label, 'score_label': score_label,
                          'pose_label': pose_label,
                          'normals_label': normals_label,
                          'distance_label': distance_label,
                          'matrix_label': matrix_label,
                          'mode': 'train'}
            elif config.train_two_stage:
                kwargs = {'cls_label': cls_label, 'score_label': score_label,
                          'pose_label': pose_label,
                          'matrix_label': matrix_label, 'mode': 'train'}
            else:
                # Fix: previously fell through with `kwargs` undefined.
                raise ValueError(
                    'config.train_grasp requires train_grasp_approach or '
                    'train_two_stage to be enabled')
        else:
            objectness_label = label[3].cuda(non_blocking=True)
            kwargs = {'cls_label': cls_label, 'score_label': score_label,
                      'pose_label': pose_label,
                      'objectness_label': objectness_label, 'mode': 'train'}
        batch_dict = model(data, **kwargs)

        batch_dict['total_loss'].backward()
        optimizer.step()
        net_time = time.time() - net_time

        metric_time = time.time()
        batch_size = data.size(0)
        stage_one_cls_loss.update(
            to_python_float(_reduced(batch_dict, 'stage_one_cls_loss')), batch_size)
        stage_one_score_loss.update(
            to_python_float(_reduced(batch_dict, 'stage_one_score_loss')), batch_size)
        if config.train_grasp:
            stage_one_grasp_pose_loss.update(
                to_python_float(_reduced(batch_dict, 'stage_one_grasp_pose_loss')), batch_size)
            if config.train_grasp_approach:
                stage_one_normal_loss.update(
                    to_python_float(_reduced(batch_dict, 'stage_one_normal_loss')), batch_size)
                stage_one_distance_loss.update(
                    to_python_float(_reduced(batch_dict, 'stage_one_distance_loss')), batch_size)
        if config.train_suction:
            stage_one_suction_pose_loss.update(
                to_python_float(_reduced(batch_dict, 'stage_one_suction_pose_loss')), batch_size)
        if config.train_two_stage:
            stage_two_cls_loss.update(
                to_python_float(_reduced(batch_dict, 'stage_two_loss')), batch_size)
        if not config.use_focalloss:
            # Point-wise top-1 classification accuracy (cross-entropy mode).
            stage_one_prec1 = accuracy(
                batch_dict['batch_cls_preds'].data, cls_label, topk=(1, ))[0]
            if config.distributed:
                stage_one_prec1 = reduce_tensor(stage_one_prec1)
            top1.update(to_python_float(stage_one_prec1), batch_size)
        metric_time = time.time() - metric_time

        if i % int(config.print_freq) == 0 and config.rank == 0:
            if not config.use_focalloss:
                logger.info('Train:[{0}] [{1}/{2}]\t'
                        'Data_time{3:.3f}\t'
                        'Net_time {4:.3f}\t'
                        'Metric_time {5:.3f}\t'
                        'Lr {6:.4f}\t'
                        'S-1 Clsloss: {stage1_cls_loss.avg:.4f}\t'
                        'Top1 is {top1.val:.4f} ({top1.avg:.4f})'.format(
                            epoch, i + 1, n_iteration_per_epoch,
                            data_time, net_time, metric_time, scheduler.get_lr()[0],
                            stage1_cls_loss=stage_one_cls_loss,
                            top1=top1))
                _log_pose_losses()
                if config.train_two_stage:
                    logger.info('S-2 Clsloss: {stage2_cls_loss.avg:.6f}'.format(
                        stage2_cls_loss=stage_two_cls_loss))
            else:
                logger.info('Train:[{0}] [{1}/{2}]\t'
                            'Data_time{3:.3f}\t'
                            'Net_time {4:.3f}\t'
                            'Metric_time {5:.3f}\t'
                            'Lr {6:.4f}\t'
                            'S-1 CLS loss: {stage1_cls_loss.avg:.4f}\t'
                             'S-1 Score loss: {stage1_score_loss.avg:.4f}'
                            .format(
                                epoch, i + 1, n_iteration_per_epoch,
                                data_time, net_time, metric_time, scheduler.get_lr()[0],
                                stage1_cls_loss=stage_one_cls_loss,
                                stage1_score_loss=stage_one_score_loss))
                _log_pose_losses()
                if config.train_two_stage:
                    logger.info('S-2 CLS loss: {stage2_cls_loss.avg:.6f}\t'.format(
                        stage2_cls_loss=stage_two_cls_loss))

        end = time.time()
    if not config.use_focalloss:
        return top1.avg
    else:
        return (stage_one_cls_loss.avg, stage_one_grasp_pose_loss.avg,
                stage_one_normal_loss.avg, stage_one_score_loss.avg,
                stage_one_distance_loss.avg)
