import glob
import json
import logging
import os
from pathlib import Path

import numpy as np
import torch
import yaml
from fsdet.plugin.yolov5.models.experimental import attempt_load
from fsdet.plugin.yolov5.utils.datasets import create_dataloader
from fsdet.plugin.yolov5.utils.general import (box_iou, check_dataset,
                                               check_file, check_img_size,
                                               coco80_to_coco91_class,
                                               increment_path,
                                               non_max_suppression,
                                               scale_coords, set_logging,
                                               xywh2xyxy, xyxy2xywh)
from fsdet.plugin.yolov5.utils.loss import compute_loss
from fsdet.plugin.yolov5.utils.metrics import ConfusionMatrix, ap_per_class
from fsdet.plugin.yolov5.utils.plots import output_to_target, plot_images
from fsdet.plugin.yolov5.utils.torch_utils import (select_device,
                                                   time_synchronized)
from tqdm import tqdm


def test(outputs,
         targets_label,
         classes_name,
         save_dir='checkpoints/ciwa/10shot_CL_IoU/inference',  # for saving images
         verbose=True,
         save_txt=False,  # for auto-labelling
         save_conf=False,
         plots=True,
         log_imgs=0):  # number of logged images
    """Evaluate detection outputs against ground-truth labels.

    Logs overall and (optionally) per-class P / R / F1 / mAP@.5 /
    mAP@.5:.95, optionally saving a confusion-matrix plot and PR curves.

    Args:
        outputs: iterable of per-batch model outputs; each element is a
            sequence of per-image dicts whose ``"instances"`` entry exposes
            ``pred_boxes.tensor``, ``scores`` and ``pred_classes``
            (Detectron2-style — assumed, confirm against caller).
        targets_label: iterable (parallel to ``outputs``) of dicts with
            ``'labels'`` (N,) and ``'bboxes'`` (N, 4) arrays, in the same
            coordinate space as the predictions.
        classes_name: sequence of class names; its length is the number of
            classes.
        save_dir: directory where plots are written (created if missing).
        verbose: if True, also log per-class metrics.
        save_txt, save_conf, log_imgs: unused; kept for interface
            compatibility with the upstream YOLOv5 ``test`` signature.
        plots: if True, build the confusion matrix and PR-curve plots.

    Returns:
        Tuple ``(mp, mr, mf1, map50, map5095, maps)`` — overall mean
        precision, recall, F1, mAP@.5 and mAP@.5:.95, plus ``maps``, a
        per-class AP@.5:.95 array (classes without statistics fall back to
        the overall mAP).
    """
    logger = logging.getLogger(__name__)
    os.makedirs(save_dir, exist_ok=True)
    device = 'cpu'
    nc = len(classes_name)  # number of classes
    iouv = torch.linspace(0.5, 0.95, 10).to(device)  # IoU thresholds for mAP@0.5:0.95
    niou = iouv.numel()

    seen = 0  # number of images processed
    confusion_matrix = ConfusionMatrix(nc=nc)
    names = dict(enumerate(classes_name))  # class index -> class name
    logger.info('-' * 50)
    s = ('%20s' + '%12s' * 7) % ('Class', 'Images', 'Targets', 'P', 'R', 'F1', 'mAP@.5', 'mAP@.5:.95')
    logger.info(s)
    # Defaults reported when no statistics are accumulated.
    p, r, f1, mp, mr, mf1, map50, map5095 = 0., 0., 0., 0., 0., 0., 0., 0.
    stats, ap_class = [], []
    for targets, output in zip(targets_label, outputs):
        # Stack into (N, 5) rows of [class, x1, y1, x2, y2].
        targets = np.concatenate([targets['labels'][..., None], targets['bboxes']], axis=1)
        targets = torch.tensor(targets).type(torch.float32)

        # Statistics per image
        for si, out in enumerate(output):
            nl = len(targets)  # number of ground-truth labels
            tcls = targets[:, 0].tolist() if nl else []  # target classes
            instances = out["instances"].to('cpu')
            boxes = instances.pred_boxes.tensor
            scores = instances.scores
            classes = instances.pred_classes
            # (M, 6) rows of [x1, y1, x2, y2, conf, class].
            pred = torch.cat([boxes, scores[..., None], classes[..., None]], dim=1)
            seen += 1

            if len(pred) == 0:
                if nl:
                    # Targets exist but nothing was predicted: record all misses.
                    stats.append((torch.zeros(0, niou, dtype=torch.bool), torch.Tensor(), torch.Tensor(), tcls))
                continue

            # Start with every prediction incorrect at every IoU threshold.
            correct = torch.zeros(pred.shape[0], niou, dtype=torch.bool, device=device)
            if nl:
                detected = []  # indices of targets already matched
                tcls_tensor = targets[:, 0]
                tbox = targets[:, 1:5]  # target boxes, same space as predictions
                if plots:
                    confusion_matrix.process_batch(pred, torch.cat((targets[:, 0:1], tbox), 1))

                # Per target class
                for cls in torch.unique(tcls_tensor):
                    ti = (cls == tcls_tensor).nonzero(as_tuple=False).view(-1)  # target indices
                    pi = (cls == pred[:, 5]).nonzero(as_tuple=False).view(-1)  # prediction indices

                    # Search for detections
                    if pi.shape[0]:
                        # Best-matching target for every prediction of this class.
                        ious, i = box_iou(pred[pi, :4], tbox[ti]).max(1)

                        # Greedily assign predictions to still-unmatched targets.
                        detected_set = set()
                        for j in (ious > iouv[0]).nonzero(as_tuple=False):
                            d = ti[i[j]]  # matched target index
                            if d.item() not in detected_set:
                                detected_set.add(d.item())
                                detected.append(d)
                                correct[pi[j]] = ious[j] > iouv  # 1 x niou booleans
                                if len(detected) == nl:  # all targets already located in image
                                    break

            # Append statistics: (correct, confidence, predicted class, target classes).
            stats.append((correct.cpu(), pred[:, 4].cpu(), pred[:, 5].cpu(), tcls))

    # Compute statistics
    stats = [np.concatenate(x, 0) for x in zip(*stats)]  # to numpy
    if len(stats) and stats[0].any():
        p, r, ap, f1, ap_class = ap_per_class(*stats, plot=plots, save_dir=save_dir, names=names)
        # Take the AP@.5 column; average AP over thresholds for AP@.5:.95.
        p, r, ap50, ap, f1 = p[:, 0], r[:, 0], ap[:, 0], ap.mean(1), f1[:, 0]
        mp, mr, map50, map5095, mf1 = p.mean(), r.mean(), ap50.mean(), ap.mean(), f1.mean()
        nt = np.bincount(stats[3].astype(np.int64), minlength=nc)  # targets per class
    else:
        nt = torch.zeros(1)

    # Plots
    if plots:
        confusion_matrix.plot(save_dir=save_dir, names=list(names.values()))

    # Print overall results
    pf = '%20s' + '%12.3g' * 7  # print format
    logger.info(pf % ('all', seen, nt.sum(), mp, mr, mf1, map50, map5095))

    # Print results per class
    if verbose and nc > 1 and len(stats):
        for i, c in enumerate(ap_class):
            logger.info(pf % (names[c], seen, nt[c], p[i], r[i], f1[i], ap50[i], ap[i]))

    # Per-class AP@.5:.95; classes without stats keep the overall mAP.
    maps = np.zeros(nc) + map5095
    for i, c in enumerate(ap_class):
        maps[c] = ap[i]
    return mp, mr, mf1, map50, map5095, maps
