#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import numpy as np
import pandas as pd
import torch

from detectron2.config import get_cfg
from detectron2 import model_zoo
from detectron2.modeling import build_model
from detectron2.checkpoint import DetectionCheckpointer

from .mdistance_utils import batched_extr_features_fromDet, calc_md, dist2prob, plot_hist, plot_hist_v2, compute_distance, compute_angle, plot_hr_map, plot_hr_map_v2, plot_dv_map, plot_md
from .detector import add_det_config
from .dataset_utils import get_det_dicts, dump_dicts, gen_img_crop
from pathlib import Path
from tqdm import tqdm
from numpy.linalg import inv


def extr_features(opt):
    """Extract per-instance features from the real (det) and synthetic (md) sets.

    Builds the RetinaNet-based detector from the current PTL iteration's
    checkpoint, runs one extraction pass over the real detection dataset,
    then one pass per training scale over the synthetic dataset.

    Args:
        opt: options namespace; reads exp_id, cur_ptl_iter,
            score_thresh_test_extr, min_size_train_det, max_size_train_det,
            min_size_train_syn_det, extr_mode and batch_size_extr.
    """
    # Dataset locations for the current PTL iteration.
    real_dataset = os.path.join("./datasets", opt.exp_id, "det", opt.cur_ptl_iter)
    syn_dataset = os.path.join("./datasets", opt.exp_id, "md", opt.cur_ptl_iter)

    # Detector checkpoint produced by this iteration's training run.
    weight_file = os.path.join("./models", opt.exp_id, "det", opt.cur_ptl_iter, "model_final.pth")

    # Where the extracted feature files go.
    results_root = os.path.join("./results", opt.exp_id, "md", opt.cur_ptl_iter)
    real_out = os.path.join(results_root, "det")
    syn_out = os.path.join(results_root, "md")

    # Build the model config on top of the zoo RetinaNet baseline.
    cfg = get_cfg()
    cfg.merge_from_file(model_zoo.get_config_file("COCO-Detection/retinanet_R_50_FPN_1x.yaml"))
    cfg.MODEL.META_ARCHITECTURE = "RetinaNetPTL"
    cfg.MODEL.RETINANET.NUM_CLASSES = 1
    cfg.MODEL.RETINANET.NUM_CONVS = 2
    cfg.MODEL.RETINANET.SCORE_THRESH_TEST = opt.score_thresh_test_extr
    cfg.INPUT.RANDOM_FLIP = "none"
    cfg.INPUT.MIN_SIZE_TRAIN = tuple(opt.min_size_train_det)
    cfg.INPUT.MAX_SIZE_TRAIN = opt.max_size_train_det

    # Project-specific config keys.
    add_det_config(cfg, opt)

    model = build_model(cfg)
    DetectionCheckpointer(model).load(weight_file)
    model.eval()

    # Real (Rt) set: single pass with batch size 1.
    n_real = batched_extr_features_fromDet(
        cfg, model, "det_" + opt.cur_ptl_iter, real_dataset, real_out, opt.extr_mode, 1)
    print("\n%d insts are extracted from det." % n_real)

    # Synthetic (Vt) set: one pass per training scale.
    for scale in opt.min_size_train_syn_det:
        print("\n%d scale:" % scale)
        cfg.INPUT.MIN_SIZE_TRAIN_SYN = scale

        n_syn = batched_extr_features_fromDet(
            cfg, model,
            "md_" + opt.cur_ptl_iter + "_" + str(scale),
            syn_dataset,
            os.path.join(syn_out, str(scale)),
            opt.extr_mode, opt.batch_size_extr)
        print("\n%d insts are extracted from md in scale %d." % (n_syn, scale))
        

def calc_mdistance(opt):
    """Score synthetic instances by their distance to the real-feature
    distribution, sample a training subset, and build the GAN train set.

    Pipeline:
      1. Fit mean / (biased) covariance of real (det) instance features.
      2. Load synthetic (md) features from every scale, keep the
         minimum-distance detection per image.
      3. Sample ``opt.n_selected_syn_per_iter`` images per ``opt.imp_samp``,
         plot distance statistics, and accumulate selection counts across
         PTL iterations.
      4. Dump trainA/trainB json + image crops for GAN training.

    Args:
        opt: options namespace; reads exp_id, cur_ptl_iter, score_thresh_r,
            score_thresh_v, features_dim, min_size_train_syn_det,
            use_euclidean, imp_samp, exp_tau, n_selected_syn_per_iter,
            syn_smp_angles, md_clip, display_cdf, min_bbox_hw_gan,
            target_hw_gan.

    Raises:
        ValueError: if ``opt.imp_samp`` is not one of
            direct / exp / low / mid / high.
    """
    np.random.seed(5271)

    # Setup paths
    extract_root = os.path.join("./results", opt.exp_id, "md")
    output_path = os.path.join(extract_root, opt.cur_ptl_iter)
    Path(output_path).mkdir(parents=True, exist_ok=True)

    md_dataset_root = os.path.join("./datasets", opt.exp_id, "md")
    gan_dataset_root = os.path.join("./datasets", opt.exp_id, "gan")
    det_dataset_root = os.path.join("./datasets", opt.exp_id, "det")

    # ---- Real (Rt) feature statistics -------------------------------------
    # features.csv layout (per the slicing below): col 0 = img_name,
    # col 1 = score, cols 2 .. features_dim+1 = the feature vector.
    print("\nCalculate det mean...")
    det_features_path = os.path.join(extract_root, opt.cur_ptl_iter, "det", "features.csv")
    det_human_df = pd.read_csv(det_features_path, header=None)
    det_human_df = det_human_df[det_human_df[1] >= opt.score_thresh_r]  # score >= score_thres
    det_human = det_human_df[list(range(2, opt.features_dim + 2, 1))].values.astype(np.float32)
    det_human_mean = np.mean(det_human, axis=0)
    num_det_human = det_human.shape[0]

    print("\nCalculate det cov...")
    # Biased covariance (divide by N): one matrix product replaces the
    # former per-row Python loop of np.outer sums — same math, C speed.
    centered = det_human - det_human_mean
    det_human_cov_mean_inv = inv(centered.T @ centered / num_det_human)
    del centered

    # ---- Distance of each virtual (Vt) instance ---------------------------
    print("\nCollect synPerson...")
    # Read virtual humans from all scales; tag each row with its scale so the
    # per-image minimum below can span scales.
    per_scale = []
    for scale in opt.min_size_train_syn_det:
        df_sc = pd.read_csv(os.path.join(extract_root, opt.cur_ptl_iter, "md", str(scale), "features.csv"), header=None)
        df_sc[len(df_sc.columns)] = [scale] * len(df_sc)
        per_scale.append(df_sc)
    df_syn = pd.concat(per_scale, ignore_index=True)

    # score >= score_thres
    df_syn = df_syn[df_syn[1] >= opt.score_thresh_v]

    # Split off the feature columns; keep img_name / score / scale.
    md_human = df_syn.values[:, 2:-1].astype(np.float32)
    df_syn = df_syn.drop(df_syn.columns[2:-1], axis=1)
    # The scale column's integer label is features_dim + 2; rename it via its
    # actual label instead of the former hard-coded 34 (which silently broke
    # whenever features_dim != 32).
    df_syn = df_syn.rename(columns={0: "img_name", 1: "score", df_syn.columns[-1]: "scale"})

    # Distance of every synthetic instance to the real-feature mean.
    md_human = md_human - det_human_mean
    if opt.use_euclidean:
        print("\nCalculate eudistance...")
        # Squared Euclidean norm per row, vectorized (== np.dot(x, x)).
        df_syn["mdistance"] = np.einsum("ij,ij->i", md_human, md_human)
    else:
        print("\nCalculate mdistance...")
        df_syn["mdistance"] = [calc_md(inst, det_human_cov_mean_inv) for inst in tqdm(md_human)]

    # Keep the smallest mdistance among all scales for each image.
    print("\nFind the min mdistance for each image and keep it...")
    idx_min = df_syn.groupby(["img_name"])["mdistance"].transform("min") == df_syn["mdistance"]
    df_syn = df_syn[idx_min]
    df_syn = df_syn.drop_duplicates(subset=["img_name"])

    # ---- Weighted random sampling -----------------------------------------
    if opt.imp_samp == "direct":
        print("\nSampling (direct) based on mdistance...")
        prob = dist2prob(df_syn["mdistance"].to_numpy())
        selected_idx = np.random.choice(len(df_syn), opt.n_selected_syn_per_iter, p=prob, replace=False)
        selected_df_syn = df_syn.iloc[selected_idx].sort_values("mdistance")
    elif opt.imp_samp == "exp":
        print("\nSampling (exp) based on mdistance (tau=%f)..." % opt.exp_tau)
        prob = dist2prob(df_syn["mdistance"].to_numpy(), tau=opt.exp_tau)
        selected_idx = np.random.choice(len(df_syn), opt.n_selected_syn_per_iter, p=prob, replace=False)
        selected_df_syn = df_syn.iloc[selected_idx].sort_values("mdistance")
    elif opt.imp_samp == "low":
        selected_df_syn = df_syn.sort_values("mdistance").head(opt.n_selected_syn_per_iter)
        print("\nSelecting low-%d: from %.3f to %.3f" % (opt.n_selected_syn_per_iter,
                                                         selected_df_syn.iloc[0]['mdistance'],
                                                         selected_df_syn.iloc[-1]['mdistance']))
    elif opt.imp_samp == "mid":
        mid_idx = int(len(df_syn)/2)
        half_sel_num = int(opt.n_selected_syn_per_iter/2)
        selected_df_syn = df_syn.sort_values("mdistance").iloc[mid_idx-half_sel_num:mid_idx+half_sel_num]
        print("\nSelecting mid-%d: from %.3f to %.3f" % (opt.n_selected_syn_per_iter,
                                                         selected_df_syn.iloc[0]['mdistance'],
                                                         selected_df_syn.iloc[-1]['mdistance']))
    elif opt.imp_samp == "high":
        selected_df_syn = df_syn.sort_values("mdistance").tail(opt.n_selected_syn_per_iter)
        print("\nSelecting high-%d: from %.3f to %.3f" % (opt.n_selected_syn_per_iter,
                                                          selected_df_syn.iloc[0]['mdistance'],
                                                          selected_df_syn.iloc[-1]['mdistance']))
    else:
        # Previously an unknown mode caused a NameError much later; fail fast.
        raise ValueError("Unknown imp_samp mode: %s" % opt.imp_samp)

    # Plot once for every sampling mode (was duplicated in each branch).
    plot_hist(selected_df_syn, output_path, name="sampled_syn")
    plot_hist_v2(df_syn, output_path, name="all_syn")

    # ---- Analysis ----------------------------------------------------------
    print("\nAnalysis...")

    # Decode [height, radius, angle] bin indices from img_name tokens
    # (fields 4/5/6 of the underscore-separated name, stepped by 5 / 5 /
    # syn_smp_angles degrees — presumably the synthesis grid; verify against
    # the image-naming code).
    md_human_id = [[int(int(x.split("_")[4])/5 - 1)] +
                   [int(int(x.split("_")[5])/5 - 1)] +
                   [int(int(x.split("_")[6])/opt.syn_smp_angles)] for x in df_syn.values[:, 0].tolist()]

    # Accumulators over [height, radius, angle] bins.
    num_angle_bins = int(360/opt.syn_smp_angles)
    num_inst = np.zeros([10, 6, num_angle_bins])
    sum_dist = np.zeros([10, 6, num_angle_bins])
    num_sel_inst = np.zeros([10, 6, num_angle_bins])  # selected subset only

    # Accumulators over [distance, viewing_angle] bins.
    num_inst_dv = np.zeros([3, 6])
    sum_dist_dv = np.zeros([3, 6])
    num_sel_inst_dv = np.zeros([3, 6])

    # Hoisted out of the loop: O(1) set membership (was an O(n) list scan
    # rebuilt per instance) and plain arrays instead of repeated .iloc.
    selected_names = set(selected_df_syn["img_name"].tolist())
    mdists = df_syn["mdistance"].to_numpy()
    img_names = df_syn["img_name"].to_numpy()

    for idx in tqdm(range(len(df_syn))):
        height, radius, angle = md_human_id[idx]

        # Map bin indices back to metric values (bin i -> i*5 + 5).
        dist = compute_distance(height*5+5, radius*5+5)
        vangle = compute_angle(height*5+5, radius*5+5)

        mdist = mdists[idx]
        num_inst[height][radius][angle] += 1
        sum_dist[height][radius][angle] += mdist

        num_inst_dv[int(dist/20)][int(vangle/15)] += 1
        sum_dist_dv[int(dist/20)][int(vangle/15)] += mdist

        if img_names[idx] in selected_names:
            num_sel_inst[height][radius][angle] += 1
            num_sel_inst_dv[int(dist/20)][int(vangle/15)] += 1

    # Collapse the angle axis -> [height, radius].
    sum_H_R = np.sum(sum_dist, axis=2)
    num_H_R = np.sum(num_inst, axis=2)
    num_sel_H_R = np.sum(num_sel_inst, axis=2)

    # Accumulate selection counts across PTL iterations.
    acc_num_sel_H_R = num_sel_H_R.copy()
    acc_num_sel_inst_dv = num_sel_inst_dv.copy()
    if opt.cur_ptl_iter != "iter_0":
        prev_iter = "iter_" + str([int(i) - 1 for i in opt.cur_ptl_iter.split("_") if i.isdigit()][0])
        acc_num_sel_H_R = acc_num_sel_H_R + np.load(os.path.join(extract_root, prev_iter, "acc_sel_insts.npy"))
        acc_num_sel_inst_dv = acc_num_sel_inst_dv + np.load(os.path.join(extract_root, prev_iter, "acc_sel_insts_dv.npy"))

    with open(os.path.join(output_path, "acc_sel_insts.npy"), 'wb') as f:
        np.save(f, acc_num_sel_H_R)

    with open(os.path.join(output_path, "acc_sel_insts_dv.npy"), 'wb') as f:
        np.save(f, acc_num_sel_inst_dv)

    # Short aliases for plotting (former comments had the labels swapped).
    M = sum_H_R / (num_H_R + 1e-6)          # mean mdistance per [H, R]
    N = num_H_R                             # detected insts per [H, R]
    Q = num_sel_H_R                         # selected this iter per [H, R]
    S = acc_num_sel_H_R                     # selected, cumulative per [H, R]
    C = sum_dist_dv / (num_inst_dv + 1e-6)  # mean mdistance per [D, V]
    A = num_inst_dv                         # detected insts per [D, V]
    B = num_sel_inst_dv                     # selected this iter per [D, V]
    D = acc_num_sel_inst_dv                 # selected, cumulative per [D, V]

    # ---- Display and output ------------------------------------------------
    # Clip for display only.
    M[M > opt.md_clip] = opt.md_clip
    C[C > opt.md_clip] = opt.md_clip

    print("\n2d hr map...")
    plot_hr_map(M, "MDistance_HR_clipped", output_path)
    plot_hr_map(N, "Num_of_Detected_Insts_HR", output_path)
    plot_hr_map(Q, "Num_of_Selected_Insts_HR", output_path)

    # Normalize the cumulative map by iter_0's maximum so the colour scale
    # stays comparable across iterations.
    if opt.cur_ptl_iter == "iter_0":
        plot_hr_map_v2(S, "Total_Selected_Insts_HR", output_path, max_value=np.max(S)/opt.n_selected_syn_per_iter)
    else:
        S_0 = np.load(os.path.join(extract_root, "iter_0", "acc_sel_insts.npy"))
        plot_hr_map_v2(S, "Total_Selected_Insts_HR", output_path, max_value=np.max(S_0)/opt.n_selected_syn_per_iter)

    plot_dv_map(C, "MDistance_DV_clipped", output_path)
    plot_dv_map(A, "Num_of_Detected_Insts_DV", output_path)
    plot_dv_map(B, "Num_of_Selected_Insts_DV", output_path)
    plot_dv_map(D, "Total_Selected_Insts_DV", output_path)

    print("\nSave mdistance...")
    plot_md(df_syn, output_path, display_cdf=opt.display_cdf, md_clip=opt.md_clip)

    print("%d images are selected in this iteration..." % np.sum(num_sel_H_R))
    print("%d images are totally selected..." % np.sum(acc_num_sel_H_R))

    # ---- Make train-set for GAN --------------------------------------------
    selected_img_id = set(selected_df_syn["img_name"].values.tolist())

    # trainA: the selected synthetic images.
    cur_md_dataset = get_det_dicts(dataset_path=os.path.join(md_dataset_root, opt.cur_ptl_iter))
    trainA = [x for x in cur_md_dataset if x["image_id"] in selected_img_id]

    # trainB: all real images of this iteration.
    trainB = get_det_dicts(dataset_path=os.path.join(det_dataset_root, opt.cur_ptl_iter))

    # Output
    output_gan_path = os.path.join(gan_dataset_root, opt.cur_ptl_iter)
    Path(output_gan_path).mkdir(parents=True, exist_ok=True)

    dump_dicts(trainA, output_gan_path, "trainA.json")
    dump_dicts(trainB, output_gan_path, "trainB.json")

    gen_img_crop(trainA, output_gan_path, "A", min_bbox_hw=opt.min_bbox_hw_gan, target_hw=opt.target_hw_gan)
    gen_img_crop(trainB, output_gan_path, "B", min_bbox_hw=opt.min_bbox_hw_gan, target_hw=opt.target_hw_gan)