"""pointnet2 utils"""

import numpy as np
import mindspore as ms
import mindspore.numpy as mnp
from mindspore import ops
from mindspore.common.tensor import Tensor
from mindspore.ops.primitive import constexpr
import logging
from tqdm import tqdm


@constexpr
def generate_tensor_fps(b, n):
    """Generate random start indices for farthest-point sampling.

    Returns a (b,) int32 Tensor with one random index in [0, n) per batch.
    NOTE(review): @constexpr is evaluated at graph-compile time and its result
    is presumably cached per (b, n), so the "random" start may be fixed within
    a compiled graph — confirm this is intended.
    """
    farthest = Tensor(np.random.randint(n, size=(b,)), ms.int32)
    return farthest


def square_distance(src, dst):
    """
    Pairwise squared Euclidean distance between two batched point sets.

    Uses the expansion ||x - y||^2 = ||x||^2 + ||y||^2 - 2 * x.y, so the whole
    computation is one batched matmul plus two squared-norm reductions.

    Input:
        src: source points, [b, n, c]
        dst: target points, [b, m, c]
    Output:
        dist: per-pair squared distance, [b, n, m]
    """
    b, n, _ = src.shape
    _, m, _ = dst.shape
    dst_t = ops.Transpose()(dst, (0, 2, 1))
    cross = ops.BatchMatMul()(src, dst_t)
    src_sq = ops.Reshape()(ops.ReduceSum()(src ** 2, -1), (b, n, 1))
    dst_sq = ops.Reshape()(ops.ReduceSum()(dst ** 2, -1), (b, 1, m))
    # Same accumulation order as the textbook form: -2*x.y + ||x||^2 + ||y||^2.
    return -2 * cross + src_sq + dst_sq


def index_points(points, idx):
    """
    Gather subsets of points by per-batch indices.

    Input:
        points: input points data, [b, n, c]
        idx: sample index data, [b, s] or [b, s, nsample]
    Return:
        new_points: indexed points data, [b, s, c] or [b, s, nsample, c]
    """
    idx_shape = idx.shape
    # Build a batch-id tensor broadcast to idx's shape so that GatherNd can
    # consume (batch, point) index pairs.
    if len(idx_shape) == 2:
        view_shape = (idx_shape[0], 1)
    else:
        view_shape = (idx_shape[0], 1, 1)
    batch_ids = mnp.arange(idx_shape[0], dtype=ms.int32).view(*view_shape).expand_as(idx)
    gather_index = ops.Concat(-1)((
        batch_ids.reshape(idx_shape + (1,)),
        idx.reshape(idx_shape + (1,)),
    ))
    return ops.GatherNd()(points, gather_index)


def farthest_point_sample(xyz, npoint):
    """
    Iterative farthest point sampling (FPS).

    Fixes: removed a dead `mnp.ones(xyz.shape)` allocation that was
    immediately overwritten, and renamed the misspelled local `poinds`.

    Input:
        xyz: pointcloud data, [b, n, 3] or [b, n, 6] (extra channels ignored)
        npoint: number of samples
    Return:
        centroids: sampled pointcloud index, [b, npoint]
    """
    points = xyz[:, :, :3]  # only spatial coordinates drive the sampling
    b, n, _ = points.shape
    # Stored transposed ([npoint, b]) so each iteration assigns a whole row.
    centroids = mnp.zeros((npoint, b), ms.int32)
    # Running minimum squared distance from each point to the selected set.
    distance = mnp.ones((b, n), ms.float64) * 1e9
    farthest = generate_tensor_fps(b, n)  # random start index per batch
    batch_indices = mnp.arange(b, dtype=ms.int32)
    for i in range(npoint):
        # NOTE(review): the float32 round-trip around the row write looks like
        # a workaround for int32 tensor item assignment — confirm on backend.
        centroids = ops.Cast()(centroids, ms.float32)
        farthest = ops.Cast()(farthest, ms.float32)
        centroids[i] = farthest
        centroids = ops.Cast()(centroids, ms.int32)
        farthest = ops.Cast()(farthest, ms.int32)
        # Gather the coordinates of the current farthest point per batch.
        index = ops.Concat(-1)((batch_indices.reshape(batch_indices.shape + (1,)),
                                farthest.reshape(farthest.shape + (1,))))
        centroid = ops.GatherNd()(points, index).reshape((b, 1, 3))
        dist = ops.ReduceSum()((points - centroid) ** 2, -1)
        distance = ops.Minimum()(distance, dist).astype(ms.float32)  # [b, n]
        farthest = ops.Argmax()(distance)  # [b]
    return ops.Transpose()(centroids, (1, 0))  # [b, npoint]


def query_ball_point(radius, nsample, xyz, new_xyz):
    """
    Input:
        radius: local region radius
        nsample: max sample number in local region
        xyz: all points, [b, n, 3]
        new_xyz: query points, [b, s, 3]
    Return:
        group_idx: grouped points index, [b, s, nsample]
    """
    b, n, _ = xyz.shape
    _, s, _ = new_xyz.shape
    # Start with every candidate index [0, n) replicated for each query point.
    group_idx = mnp.arange(0, n, 1, ms.int32).view(1, 1, n)
    group_idx = ops.Tile()(group_idx, (b, s, 1))
    sqrdists = square_distance(new_xyz, xyz)

    # Replace indices of points outside the ball with the sentinel -1.
    idx = sqrdists > radius ** 2
    group_idx = ops.Select()(idx, -1 * ops.OnesLike()(group_idx), group_idx)
    # TopK keeps the nsample numerically-largest entries, i.e. valid in-ball
    # indices rank before -1 sentinels. Cast to float32 because TopK here
    # operates on floating point input.
    group_idx = ops.Cast()(group_idx, ms.float32)
    group_idx, _ = ops.TopK()(group_idx, nsample)
    group_idx = ops.Cast()(group_idx, ms.int32)

    # First column is the largest valid index per query; tile it to use as the
    # fill value for padded (-1) slots.
    group_first = group_idx[:, :, 0].view(b, s, 1)
    group_first = ops.Tile()(group_first, (1, 1, nsample))

    # Where group_idx is valid, demote group_first to -1; Maximum then keeps
    # the valid index and replaces -1 slots with the first valid index.
    # (If a query ball is empty, both are -1 and the slot stays -1.)
    index = group_idx != -1
    group_first = ops.Select()(index, -1 * ops.OnesLike()(group_first), group_first)
    group_idx = ops.Maximum()(group_idx, group_first)

    return group_idx


def sample_and_group_all(xyz, points):
    """
    Group the whole point set into a single region centred at the origin.

    Input:
        xyz: input points position data, [b, n, 3]
        points: input points data, [b, n, d] or None
    Return:
        new_xyz: sampled points position data, [b, 1, 3] (all zeros)
        new_points: sampled points data, [b, 1, n, 3+d]
    """
    b, n, c = xyz.shape
    new_xyz = ops.Zeros()((b, 1, c), ms.float32)
    grouped_xyz = ops.Reshape()(xyz, (b, 1, n, c))
    if points is None:
        return new_xyz, grouped_xyz
    features = ops.Reshape()(points, (b, 1, n, -1))
    return new_xyz, ops.Concat(-1)((grouped_xyz, features))


def sample_and_group(npoint, radius, nsample, xyz, points):
    """
    FPS-sample `npoint` centroids, then ball-query and group their neighbours.

    Input:
        xyz: input points position data, [b, n, 3]
        points: input points data, [b, n, d] or None
    Return:
        new_xyz: sampled centroid positions, [b, npoint, 3]
        new_points: grouped points data, [b, npoint, nsample, 3+d]
    """
    b = xyz.shape[0]
    c = xyz.shape[2]
    centroid_idx = farthest_point_sample(xyz, npoint)
    new_xyz = index_points(xyz, centroid_idx)
    neighbour_idx = query_ball_point(radius, nsample, xyz, new_xyz)
    grouped_xyz = index_points(xyz, neighbour_idx)
    # Translate each neighbourhood so its centroid sits at the origin.
    grouped_xyz_norm = grouped_xyz - new_xyz.view(b, npoint, 1, c)

    if points is None:
        return new_xyz, grouped_xyz_norm
    grouped_features = index_points(points, neighbour_idx)
    return new_xyz, ops.Concat(-1)((grouped_xyz_norm, grouped_features))

def filter_points(coords, preds, targets, weights):
    """Drop points with duplicate (x, y, z) coordinates.

    Keeps the first occurrence of every distinct coordinate triple and applies
    the same selection to preds, targets and weights.

    Bug fixed: the previous key concatenated str(x)+str(y)+str(z) with no
    separator, so distinct triples such as (1.23, 4.5, 0.0) and
    (1.2, 34.5, 0.0) produced the same key (it was also hashed, adding a small
    extra collision risk). The key is now the separator-joined string itself.

    Returns:
        (coord_filtered, pred_filtered, target_filtered, weight_filtered)
    """
    assert coords.shape[0] == preds.shape[0] == targets.shape[0] == weights.shape[0]
    coord_keys = np.array([
        "|".join(str(v) for v in coords[point_idx][:3])
        for point_idx in range((coords.shape[0]))
    ])
    # return_index yields the first occurrence of each unique coordinate.
    _, coord_ids = np.unique(coord_keys, return_index=True)
    return coords[coord_ids], preds[coord_ids], targets[coord_ids], weights[coord_ids]


def point_cloud_label_to_surface_voxel_label_fast(point_cloud, label, res=0.0484):
    """Convert per-point labels to per-voxel labels.

    Each point is binned into a cubic voxel of edge length `res`; for every
    occupied voxel the label of one representative point (the first occurrence
    found by np.unique) is kept.

    Bug fixed: `uvlabel = label[vpidx, :]` ran unconditionally after the ndim
    check, raising IndexError for 1-D labels; the 2-D indexing now only runs
    in the ndim == 2 branch.

    Input:
        point_cloud: [n, 3] point coordinates
        label: [n] or [n, k] per-point labels
        res: voxel edge length
    Return:
        uvidx: unique flattened voxel ids, sorted ascending
        uvlabel: representative label per voxel, [v] or [v, k]
        nvox: voxel grid extent per axis
    """
    coordmax = np.max(point_cloud, axis=0)
    coordmin = np.min(point_cloud, axis=0)
    nvox = np.ceil((coordmax - coordmin) / res)
    vidx = np.ceil((point_cloud - coordmin) / res)
    # Flatten the 3-D voxel coordinate into a single scalar id.
    vidx = vidx[:, 0] + vidx[:, 1] * nvox[0] + vidx[:, 2] * nvox[0] * nvox[1]
    uvidx, vpidx = np.unique(vidx, return_index=True)
    if label.ndim == 1:
        uvlabel = label[vpidx]
    else:
        assert label.ndim == 2
        uvlabel = label[vpidx, :]
    return uvidx, uvlabel, nvox


def compute_acc(coords, preds, targets, weights, num_classes):
    """Compute point-level and voxel-level accuracy statistics.

    Duplicate coordinates are filtered out first; "vox" metrics are computed
    on a 0.02-resolution surface-voxel projection of the filtered points.

    Args:
        coords: [n, 3+] point coordinates (numpy)
        preds: [n] predicted class ids
        targets: [n] ground-truth class ids
        weights: [n] per-point weights (passed through the filtering only)
        num_classes: total number of classes
    Returns:
        pointacc: overall per-point accuracy
        pointacc_per_class: [num_classes] per-class point accuracy
        voxacc: overall per-voxel accuracy
        voxacc_per_class: [num_classes] per-class voxel accuracy
        voxcaliacc: per-class voxel accuracy averaged with the voxel label
            frequency as weights ("calibrated" accuracy)
        mask: [num_classes] indicator, 1 where the class appears in targets
    """
    coords, preds, targets, weights = filter_points(coords, preds, targets, weights)

    seen_classes = np.unique(targets)
    mask = np.zeros(num_classes)
    mask[seen_classes] = 1

    # Point-level accumulators.
    total_correct = 0
    total_seen = 0
    total_seen_class = [0 for _ in range(num_classes)]
    total_correct_class = [0 for _ in range(num_classes)]

    # Voxel-level accumulators.
    total_correct_vox = 0
    total_seen_vox = 0
    total_seen_class_vox = [0 for _ in range(num_classes)]
    total_correct_class_vox = [0 for _ in range(num_classes)]

    # Class-frequency histograms (points / voxels).
    labelweights = np.zeros(num_classes)
    labelweights_vox = np.zeros(num_classes)

    correct = np.sum(preds == targets)  # evaluate only on 20 categories but not unknown
    total_correct += correct
    total_seen += targets.shape[0]
    tmp, _ = np.histogram(targets, range(num_classes + 1))
    labelweights += tmp
    for l in seen_classes:
        total_seen_class[l] += np.sum(targets == l)
        total_correct_class[l] += np.sum((preds == l) & (targets == l))

    # Project (target, pred) label pairs onto surface voxels; after the call
    # uvlabel[:, 0] holds per-voxel targets and uvlabel[:, 1] predictions.
    _, uvlabel, _ = point_cloud_label_to_surface_voxel_label_fast(coords, np.concatenate(
        (np.expand_dims(targets, 1), np.expand_dims(preds, 1)), axis=1), res=0.02)
    total_correct_vox += np.sum(uvlabel[:, 0] == uvlabel[:, 1])
    total_seen_vox += uvlabel[:, 0].shape[0]
    tmp, _ = np.histogram(uvlabel[:, 0], range(num_classes + 1))
    labelweights_vox += tmp
    for l in seen_classes:
        total_seen_class_vox[l] += np.sum(uvlabel[:, 0] == l)
        total_correct_class_vox[l] += np.sum((uvlabel[:, 0] == l) & (uvlabel[:, 1] == l))

    pointacc = total_correct / float(total_seen)
    voxacc = total_correct_vox / float(total_seen_vox)

    # Normalise the histograms into frequency weights.
    labelweights = labelweights.astype(np.float32) / np.sum(labelweights.astype(np.float32))
    labelweights_vox = labelweights_vox.astype(np.float32) / np.sum(labelweights_vox.astype(np.float32))
    caliweights = labelweights_vox
    # Frequency-weighted mean of per-class voxel accuracy; the epsilon keeps
    # unseen classes (count 0) from dividing by zero.
    voxcaliacc = np.average(
        np.array(total_correct_class_vox) / (np.array(total_seen_class_vox, dtype=np.float64) + 1e-8),
        weights=caliweights)

    pointacc_per_class = np.zeros(num_classes)
    voxacc_per_class = np.zeros(num_classes)
    for l in seen_classes:
        pointacc_per_class[l] = total_correct_class[l] / (total_seen_class[l] + 1e-8)
        voxacc_per_class[l] = total_correct_class_vox[l] / (total_seen_class_vox[l] + 1e-8)

    return pointacc, pointacc_per_class, voxacc, voxacc_per_class, voxcaliacc, mask


def compute_miou(coords, preds, targets, weights, num_classes):
    """Compute per-class IoU at point level and surface-voxel level.

    Duplicate coordinates are filtered first; voxel IoU is computed on a
    0.02-resolution surface-voxel projection of the points.

    Returns:
        pointmiou: [num_classes] per-class point IoU (0 for unseen classes)
        voxmiou: [num_classes] per-class voxel IoU (0 for unseen classes)
        mask: [num_classes] indicator, 1 where the class appears in targets
    """
    coords, preds, targets, weights = filter_points(coords, preds, targets, weights)
    seen_classes = np.unique(targets)
    mask = np.zeros(num_classes)
    mask[seen_classes] = 1

    pointmiou = np.zeros(num_classes)
    voxmiou = np.zeros(num_classes)

    # Column 0 of uvlabel holds per-voxel targets, column 1 predictions.
    vox_labels = np.concatenate((np.expand_dims(targets, 1), np.expand_dims(preds, 1)), axis=1)
    uvidx, uvlabel, _ = point_cloud_label_to_surface_voxel_label_fast(coords, vox_labels, res=0.02)

    point_ids = np.arange(targets.shape[0])  # preds and targets share length
    for cls in seen_classes:
        gt_ids = point_ids[targets == cls]
        pred_ids = point_ids[preds == cls]
        inter = np.intersect1d(pred_ids, gt_ids).shape[0]
        union = np.union1d(pred_ids, gt_ids).shape[0]
        pointmiou[cls] = inter / (union + 1e-8)

        gt_vox = uvidx[uvlabel[:, 0] == cls]
        pred_vox = uvidx[uvlabel[:, 1] == cls]
        inter_vox = np.intersect1d(pred_vox, gt_vox).shape[0]
        union_vox = np.union1d(pred_vox, gt_vox).shape[0]
        voxmiou[cls] = inter_vox / (union_vox + 1e-8)

    return pointmiou, voxmiou, mask


def log_string(filename, verbosity=1, name=None):
    """Create a logger that writes to both *filename* and the console.

    Args:
        filename: path of the log file (opened in "w" mode, truncating)
        verbosity: 0 -> DEBUG, 1 -> INFO, 2 -> WARNING
        name: logger name passed to logging.getLogger
    Returns:
        the configured logging.Logger
    """
    formatter = logging.Formatter(
        "[%(asctime)s][%(filename)s][line:%(lineno)d][%(levelname)s] %(message)s"
    )
    logger = logging.getLogger(name)
    logger.setLevel({0: logging.DEBUG, 1: logging.INFO, 2: logging.WARNING}[verbosity])

    # Attach one file handler and one console handler, both sharing the format.
    for handler in (logging.FileHandler(filename, "w"), logging.StreamHandler()):
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger


def miou_eval(model, test_ds, test_steps_per_epoch, batch_size, num_classes):
    """Evaluate `model` on `test_ds` and return the average point mIoU.

    Bug fixed: the sanity check `acc_mask.all() == miou_mask.all()` compared
    two scalar booleans instead of the masks themselves; it now compares the
    masks element-wise with np.array_equal.

    Args:
        model: network mapping [b, c, n] point tensors to per-point logits
        test_ds: dataset yielding dicts with "data", "label", "weights"
        test_steps_per_epoch: number of batches in the dataset
        batch_size: max sub-batch size fed through the model at once
        num_classes: number of semantic classes
    Return:
        avg_pointmiou: per-class point IoU averaged over classes, each class
            averaged only over the batches in which it occurs
    """
    print("evaluating...")
    pointacc_list = []
    pointacc_per_class_array = np.zeros((test_steps_per_epoch, num_classes))
    voxacc_list = []
    voxacc_per_class_array = np.zeros((test_steps_per_epoch, num_classes))
    voxcaliacc_list = []
    pointmiou_per_class_array = np.zeros((test_steps_per_epoch, num_classes))
    voxmiou_per_class_array = np.zeros((test_steps_per_epoch, num_classes))
    masks = np.zeros((test_steps_per_epoch, num_classes))

    # iter
    for load_idx, data in tqdm(enumerate(test_ds.create_dict_iterator(), 0), total=test_steps_per_epoch, smoothing=0.9):
        # feed
        coords, targets, weights = data["data"], data["label"], data["weights"]
        pred = []
        n, _, chan = coords.shape
        if n > batch_size:
            # Split oversized batches so each forward pass stays <= batch_size.
            for i in range((n - 1) // batch_size + 1):
                if (i + 1) * batch_size <= n:
                    coord = coords[i * batch_size:(i + 1) * batch_size, :, :]
                else:
                    coord = coords[i * batch_size:, :, :]
                coord = ops.Transpose()(coord, (0, 2, 1)).astype("float32")
                output = model(coord)
                pred.append(output)
        else:
            coord = coords
            coord = ops.Transpose()(coord, (0, 2, 1)).astype("float32")
            output = model(coord)
            pred.append(output)

        x = ops.Concat(0)(pred)
        pred = ops.ExpandDims()(x, 0)  # (1, CK, N, C)
        preds = pred.argmax(3)

        # eval: flatten everything to per-point numpy arrays
        coords = coords.view(-1, chan).asnumpy()  # (CK*N, C)
        preds = preds.squeeze(0).view(-1).asnumpy()  # (CK*N,)
        targets = targets.view(-1).asnumpy()  # (CK*N,)
        weights = weights.view(-1).asnumpy()  # (CK*N,)
        pointacc, pointacc_per_class, voxacc, voxacc_per_class, voxcaliacc, acc_mask = compute_acc(coords, preds,
                                                                                                   targets, weights,
                                                                                                   num_classes)
        pointmiou, voxmiou, miou_mask = compute_miou(coords, preds, targets, weights, num_classes)
        # Both metric paths must agree on which classes were seen.
        assert np.array_equal(acc_mask, miou_mask)
        mask = acc_mask

        # dump
        pointacc_list.append(pointacc)
        pointacc_per_class_array[load_idx] = pointacc_per_class
        voxacc_list.append(voxacc)
        voxacc_per_class_array[load_idx] = voxacc_per_class
        voxcaliacc_list.append(voxcaliacc)
        pointmiou_per_class_array[load_idx] = pointmiou
        voxmiou_per_class_array[load_idx] = voxmiou
        masks[load_idx] = mask

    # Per-class averages only count batches where the class actually occurs.
    avg_pointacc = np.mean(pointacc_list)
    avg_pointacc_per_class = np.sum(pointacc_per_class_array * masks, axis=0) / np.sum(masks, axis=0)

    avg_voxacc = np.mean(voxacc_list)
    avg_voxacc_per_class = np.sum(voxacc_per_class_array * masks, axis=0) / np.sum(masks, axis=0)

    avg_voxcaliacc = np.mean(voxcaliacc_list)

    avg_pointmiou_per_class = np.sum(pointmiou_per_class_array * masks, axis=0) / np.sum(masks, axis=0)
    avg_pointmiou = np.mean(avg_pointmiou_per_class)

    avg_voxmiou_per_class = np.sum(voxmiou_per_class_array * masks, axis=0) / np.sum(masks, axis=0)
    avg_voxmiou = np.mean(avg_voxmiou_per_class)

    # NOTE(review): the accuracy / voxel aggregates above are computed but not
    # returned — presumably kept for debugging; only point mIoU is reported.
    return avg_pointmiou