import os
import torch
import logging
import numpy as np

from collections import OrderedDict
import cv2

def load_checkpoint(model, checkpointPath):
    """Load weights from checkpointPath into model (in place).

    Accepts checkpoints saved either as {'state_dict': ...} or as a raw
    state dict, and strips a leading 'module.' prefix (added by
    nn.DataParallel) from parameter names on both sides. Parameters that
    are missing from the checkpoint or whose shape does not match keep
    the model's current values and are logged.

    Raises:
        ValueError: if checkpointPath does not exist.
    """
    if not os.path.exists(checkpointPath):
        raise ValueError('checkpoint {} does not exist.'.format(checkpointPath))

    # Load once; unwrap the 'state_dict' entry when present instead of
    # re-reading the file inside a bare except.
    loaded = torch.load(checkpointPath, map_location="cpu")
    if isinstance(loaded, dict) and 'state_dict' in loaded:
        loaded = loaded['state_dict']

    def _strip_module(name):
        # Remove only a leading 'module.' wrapper. A plain
        # name.replace("module", "") would corrupt any name containing
        # the substring (e.g. 'submodule.weight') and leave a leading
        # dot, making the lookup below fail with KeyError.
        return name[len('module.'):] if name.startswith('module.') else name

    checkpoint_ = OrderedDict(
        (_strip_module(name), tensor) for name, tensor in loaded.items()
    )

    state_dict = model.state_dict()
    for name in state_dict.keys():
        new_name = _strip_module(name)
        if new_name not in checkpoint_:
            logging.info('{0} not found in checkpoint'.format(name))
            continue
        if checkpoint_[new_name].shape != state_dict[name].shape:
            logging.info('{0} does not initialize'.format(name))
            continue
        state_dict[name] = checkpoint_[new_name]
    model.load_state_dict(state_dict, strict=True)

def get_optim(model, config):
    """Build the optimizer selected by config.train.optimizer.

    Supported values are 'sgd' (momentum 0.9) and 'adam'; both read the
    learning rate from config.train.lr and the weight decay from
    config.model.weight_decay.

    Raises:
        ValueError: for any other optimizer name.
    """
    choice = config.train.optimizer
    lr = config.train.lr
    decay = config.model.weight_decay

    if choice == 'sgd':
        return torch.optim.SGD(model.parameters(), lr=lr, momentum=0.9,
                               weight_decay=decay)
    if choice == 'adam':
        return torch.optim.Adam(model.parameters(), lr=lr, weight_decay=decay)
    raise ValueError("optimizer way should be the one of 'sgd, adam'")

def compute_nme(landmarks_gt, landmarks):
    """Normalized Mean Error (NME) of predicted landmarks, per sample.

    Args:
        landmarks_gt: tensor reshapeable to (N, P, 2) ground-truth points;
            P must be 106 or 17 (the two supported annotation schemes).
        landmarks: predicted landmarks with the same layout.

    Returns:
        np.ndarray of shape (N,): mean point-to-point distance per sample,
        normalized by the inter-ocular distance and the point count.

    Raises:
        ValueError: if the number of points per sample is not 106 or 17.
    """
    N = landmarks.size(0)
    landmarks_gt = landmarks_gt.reshape(N, -1, 2)
    landmarks = landmarks.reshape(N, -1, 2)

    rmse = []
    for i in range(N):
        pts_gt = landmarks_gt[i, ...]
        pts_pre = landmarks[i, ...]

        # Outer eye-corner indices depend on the annotation scheme.
        if pts_gt.size(0) == 106:
            eye_corner_left = pts_gt[52, :]
            eye_corner_right = pts_gt[61, :]
        elif pts_gt.size(0) == 17:
            eye_corner_left = pts_gt[5, :]
            eye_corner_right = pts_gt[11, :]
        else:
            raise ValueError('number of landmark points should be 106 or 17.')

        intercular = torch.sqrt(torch.sum((eye_corner_right - eye_corner_left) ** 2))
        L = pts_gt.size(0)
        errordiff = torch.sum(torch.sqrt(torch.sum((pts_gt - pts_pre) ** 2, dim=1)))
        errornorm = errordiff / (intercular * L)
        rmse.append(errornorm.item())
    # np.float was removed in NumPy 1.24; the builtin float (float64) is
    # the drop-in replacement.
    return np.array(rmse, dtype=float)

def compute_auc(errors, failure_threshold, step=0.0001, show_curve=True):
    """Area under the Cumulative Error Distribution (CED) curve.

    Args:
        errors: iterable of per-sample NME values (e.g. from compute_nme).
        failure_threshold: error above which a sample counts as a failure;
            also the upper integration bound of the CED curve.
        step: spacing of the thresholds at which the CED is sampled.
        show_curve: accepted for interface compatibility; plotting is not
            implemented in this module (no plotting backend is imported).

    Returns:
        (auc, failure_rate): AUC of the CED over [0, failure_threshold],
        normalized to [0, 1], and the fraction of samples whose error
        exceeds failure_threshold.
    """
    errs = np.asarray(errors, dtype=np.float64)
    # Thresholds on [0, failure_threshold]; the step/2 slack keeps float
    # drift from adding a sample point past the upper bound.
    thresholds = np.arange(0.0, failure_threshold + step / 2.0, step)
    ced = np.array([np.count_nonzero(errs <= t) / errs.size for t in thresholds])
    # Trapezoidal integration, normalized so a perfect result scores 1.0.
    auc = float(np.sum((ced[1:] + ced[:-1]) * 0.5 * np.diff(thresholds))) / failure_threshold
    failure_rate = float(1.0 - ced[-1])
    if show_curve:
        # NOTE(review): curve plotting intentionally not implemented —
        # this module imports no plotting dependency.
        logging.info('CED curve plotting not implemented; auc=%f', auc)
    return auc, failure_rate

def save_checkpoint(model, config, epoch):
    """Serialize the model's state dict to <output_dir>/epoch_<epoch>.pth."""
    destination = os.path.join(
        config.train.output_dir,
        "epoch_{}.pth".format(epoch),
    )
    logging.info('save checkpoint to {}'.format(destination))
    torch.save(model.state_dict(), destination)

def adjust_lr(optimizer, config, epoch):
    """Step-decay the learning rate.

    Multiplies config.train.lr by 0.1 (in place on the config) whenever
    epoch is listed in config.train.step, then writes the current lr into
    every parameter group of the optimizer.
    """
    if epoch in config.train.step:
        config.train.lr *= 0.1
        logging.info(f'adjust lr to {config.train.lr}')

    current_lr = config.train.lr
    for group in optimizer.param_groups:
        group['lr'] = current_lr

class AverageMeter(object):
    """Computes and stores the average and current value"""
    def __init__(self):
        # reset() establishes every attribute; duplicating the
        # assignments here was redundant.
        self.reset()

    def reset(self):
        """Clear all running statistics."""
        self.val = 0    # most recent value
        self.avg = 0    # running (weighted) average
        self.sum = 0    # weighted sum of observed values
        self.count = 0  # total weight observed so far

    def update(self, val, n=1):
        """Record val observed n times and refresh the running average."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count

    # Backward-compatible alias: the method was originally misspelled
    # 'updata'; keep the old name so existing callers do not break.
    updata = update



