import torch
import numpy as np
from torch import einsum
from torch import Tensor
from scipy.ndimage import distance_transform_edt as distance
from scipy.spatial.distance import directed_hausdorff

from typing import Any, Callable, Iterable, List, Set, Tuple, TypeVar, Union



def uniq(a: Tensor) -> Set:
	"""Return the set of distinct values occurring in tensor *a*."""
	return set(np.unique(a.cpu().numpy()))


# Assert utils 测试当前的数据标签是否在正确标签类别范围内
# Assert util: check that the labels in a tensor lie within the allowed set.
def sset(a: Tensor, sub: Iterable) -> bool:
	"""True when every value present in *a* is a member of *sub*."""
	values = set(torch.unique(a.cpu()).numpy())
	return values.issubset(sub)


def simplex(t: Tensor, axis=1) -> bool:
	"""True when *t* sums to 1 along *axis*, i.e. each slice is a probability simplex."""
	totals = t.sum(axis).type(torch.float32)
	return torch.allclose(totals, torch.ones_like(totals))


# 验证其在通道纬度axis=1上的和是否为1
# A tensor is one-hot along `axis` when its values are binary and sum to 1 there.
def one_hot(t: Tensor, axis=1) -> bool:
	"""True when *t* is a valid one-hot encoding along *axis*."""
	return sset(t, [0, 1]) and simplex(t, axis)


# switch between representations
# switch between representations
def probs2class(probs: Tensor) -> Tensor:
	"""Collapse a probability map (b, c, w, h) to an integer label map (b, w, h)."""
	assert simplex(probs)
	b, _, w, h = probs.shape  # type: Tuple[int, int, int, int]
	labels = probs.argmax(dim=1)
	assert labels.shape == (b, w, h)
	return labels


def probs2one_hot(probs: Tensor) -> Tensor:
	"""Convert a probability map (b, c, w, h) to its one-hot argmax encoding."""
	_, C, _, _ = probs.shape
	assert simplex(probs)

	encoded = class2one_hot(probs2class(probs), C)
	assert encoded.shape == probs.shape
	assert one_hot(encoded)
	return encoded


# 对单通道的标签进行转换成多类别的（即多通道的）编码0，1，2，3。。。。
# Encode an integer label map as a C-channel one-hot tensor (labels 0, 1, 2, ...).
def class2one_hot(seg: Tensor, C: int) -> Tensor:
	"""Turn a (b, w, h) — or bare (w, h) — label map into a (b, C, w, h) one-hot tensor."""
	if seg.dim() == 2:  # Only w, h, used by the dataloader
		seg = seg.unsqueeze(dim=0)
	assert sset(seg, list(range(C)))

	b, w, h = seg.shape  # type: Tuple[int, int, int]

	channels = [(seg == c) for c in range(C)]
	encoded = torch.stack(channels, dim=1).type(torch.int32)
	assert encoded.shape == (b, C, w, h)
	assert one_hot(encoded)
	return encoded


def one_hot2dist(seg: np.ndarray) -> np.ndarray:
	"""Turn a one-hot segmentation (C, w, h) into per-class signed distance maps.

	For each class channel: pixels outside the object get their (positive)
	Euclidean distance to the object, pixels on the object boundary get 0,
	and interior pixels get the negative distance to the boundary.
	Channels with no foreground stay all-zero.

	Returns a float array of the same shape as *seg*.
	"""
	# Validate the one-hot invariant directly in numpy: binary values that
	# sum to 1 across the channel axis.
	assert set(np.unique(seg)).issubset({0, 1})
	assert np.allclose(seg.sum(axis=0), 1.0)

	C: int = len(seg)  # number of channels == number of classes
	# Float result: distances are fractional; np.zeros_like(seg) would have
	# silently truncated them when seg carried an integer dtype.
	res = np.zeros_like(seg, dtype=np.float64)

	for c in range(C):
		# `np.bool` was removed in NumPy 1.24 — use the builtin `bool`.
		posmask = seg[c].astype(bool)
		if posmask.any():
			negmask = ~posmask
			# distance_transform_edt: Euclidean distance of each non-zero
			# pixel to the nearest zero pixel.
			# NOTE: the original wrapped this in float(), which raises
			# TypeError for any mask with more than one pixel.
			res[c] = distance(negmask) * negmask - (distance(posmask) - 1) * posmask
	return res


class SurfaceLoss():
	"""Boundary (surface) loss: mean of probabilities weighted by signed distance maps.

	The distance maps are precomputed with one_hot2dist; minimizing this loss
	pulls predicted mass toward the ground-truth boundary.
	"""

	def __init__(self, idc: List[int] = None):
		# `idc` selects which class channels contribute to the loss.
		# Default keeps the original behavior: only channel 1, i.e. the
		# background class is ignored.
		# https://github.com/LIVIAETS/surface-loss/issues/3
		# (None sentinel instead of a mutable default argument.)
		self.idc: List[int] = [1] if idc is None else list(idc)

	# probs: bcwh, dist_maps: bcwh
	def __call__(self, probs: Tensor, dist_maps: Tensor, _: Tensor) -> Tensor:
		"""Compute the loss; the third argument is unused (kept for API parity)."""
		assert simplex(probs)
		assert not one_hot(dist_maps)  # distance maps are signed reals, not one-hot

		pc = probs[:, self.idc, ...].type(torch.float32)
		dc = dist_maps[:, self.idc, ...].type(torch.float32)

		# Element-wise product over the selected channels; einsum keeps the
		# bcwh layout explicit.
		multipled = einsum("bcwh,bcwh->bcwh", pc, dc)

		loss = multipled.mean()

		return loss


if __name__ == "__main__":
	# Ground truth: a 3x3 block of class 1 inside a 5x7 grid.
	gt_labels = torch.tensor([[[0, 0, 0, 0, 0, 0, 0],
	                           [0, 1, 1, 1, 0, 0, 0],
	                           [0, 1, 1, 1, 0, 0, 0],
	                           [0, 1, 1, 1, 0, 0, 0],
	                           [0, 0, 0, 0, 0, 0, 0]]])
	# One-hot encode, then drop the batch dimension for the distance transform.
	gt_one_hot = class2one_hot(gt_labels, 2)[0].numpy()
	print(gt_one_hot)

	# Per-class signed distance maps (c, w, h).
	dist_maps = one_hot2dist(gt_one_hot)  # bcwh

	# A slightly misplaced prediction, also one-hot encoded.
	pred_labels = torch.tensor([[[0, 0, 0, 0, 0, 0, 0],
	                             [0, 1, 1, 0, 0, 0, 0],
	                             [0, 1, 1, 0, 0, 0, 0],
	                             [0, 0, 1, 1, 0, 0, 0],
	                             [0, 0, 0, 0, 0, 0, 0]]])
	pred_one_hot = class2one_hot(pred_labels, 2)

	criterion = SurfaceLoss()
	dist_batch = torch.tensor(dist_maps).unsqueeze(0)

	res = criterion(pred_one_hot, dist_batch, None)
	print('loss:', res)

