from torch import nn
import random
import torch.nn.functional as F
import torch


class DepthwiseConv2D(nn.Module):
	"""Depthwise-separable convolution: per-channel conv followed by a 1x1 pointwise conv."""

	def __init__(self, in_channels, out_channels, kernel_size):
		super(DepthwiseConv2D, self).__init__()
		self.kernel_size = kernel_size
		self.out_channel = out_channels
		# One filter per input channel (groups=in_channels), then a 1x1 conv mixes channels.
		self.depthwise_conv = nn.Conv2d(in_channels, in_channels, kernel_size=kernel_size, groups=in_channels)
		self.point_conv = nn.Conv2d(in_channels, out_channels, kernel_size=1)

	def get_property(self):
		"""Return [out_channels, kernel_size], used to rebuild an equivalent layer."""
		return [self.out_channel, self.kernel_size]

	def forward(self, x):
		return self.point_conv(self.depthwise_conv(x))


class DilatedConv2D(nn.Module):
	"""Single 2D convolution with a configurable dilation rate (no padding)."""

	def __init__(self, in_channels, out_channels, kernel_size, dilation_rate=1):
		super(DilatedConv2D, self).__init__()
		self.kernel_size = kernel_size
		self.out_channel = out_channels
		self.dilated_conv = nn.Conv2d(in_channels, out_channels, kernel_size, dilation=dilation_rate)

	def get_property(self):
		"""Return [out_channels, kernel_size], used to rebuild an equivalent layer."""
		return [self.out_channel, self.kernel_size]

	def forward(self, x):
		out = self.dilated_conv(x)
		return out


# conv space
class _CONV_Space:
	"""Candidate convolution operations for the search space.

	Attributes a-d are the candidate ops; `FA` is a flag excluded by
	random_select_NoFA but included by random_select.
	"""

	def __init__(self, in_channels, out_channels, dilation_rate=1):
		self.FA = False
		self.a = DilatedConv2D(in_channels, out_channels, 3, dilation_rate)
		self.b = DepthwiseConv2D(in_channels, out_channels, 3)
		self.c = DilatedConv2D(in_channels, out_channels, 5, dilation_rate)
		self.d = DepthwiseConv2D(in_channels, out_channels, 5)

	def random_select(self):
		"""Pick a random (name, value) pair from every attribute, 'FA' included."""
		return random.choice(list(self.__dict__.items()))

	def random_select_NoFA(self):
		"""Pick a random (name, value) pair, skipping 'FA'; None if nothing remains."""
		candidates = [item for item in self.__dict__.items() if item[0] != 'FA']
		if not candidates:
			return None
		return random.choice(candidates)


# pool space
class _POOL_Space:
	def __init__(self):
		self.e = "NoConnection"
		# 作为connection 使用 1乘1的最大池化直接连接
		self.f = nn.MaxPool2d(kernel_size=1)
		self.g = nn.AvgPool2d(kernel_size=3)
		self.h = nn.MaxPool2d(kernel_size=3)

	def random_select(self):
		attributes = list(self.__dict__.items())
		return random.choice(attributes)

	def random_select_NoFA(self):
		attributes = {k: v for k, v in self.__dict__.items() if k != 'FA'}

		# 如果没有剩余属性，返回 None
		if not attributes:
			return None

		# 随机选择一个剩余的属性及其值
		random_attr = random.choice(list(attributes.items()))
		return random_attr


class _Cell_Node:
	def __init__(self, in_channels, out_channels):
		self.Node_0 = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1, stride=1)
		self.Node_1 = nn.Conv2d(in_channels, out_channels, kernel_size=5, padding=2, stride=1)
		self.Node_2 = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1, stride=1)
		self.Node_3 = nn.Conv2d(in_channels, out_channels, kernel_size=5, padding=2, stride=1)

	def all_node(self):
		attributes = list(self.__dict__.items())
		return attributes


class cell_node(nn.Module):
	"""A single convolutional cell node that remembers its construction properties."""

	def __init__(self, in_channels, out_channels, kernel_size, padding, stride=1):
		super(cell_node, self).__init__()
		self.kernel_size = kernel_size
		self.out_channels = out_channels
		self.padding = padding
		self.hidden_layer = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=padding, stride=stride)

	def get_property(self):
		"""Return [out_channels, kernel_size, padding], used to rebuild an equivalent node."""
		return [self.out_channels, self.kernel_size, self.padding]

	def forward(self, x):
		out = self.hidden_layer(x)
		return out


class FactorizedReduce(nn.Module):
	"""Halve spatial resolution via a strided 1x1 conv, then BatchNorm and ReLU.

	NOTE(review): despite the name, this is not the two-path DARTS-style
	factorized reduce; the output has C_out * 2 channels, not C_out, and the
	`C_out % 2 == 0` assertion does not affect the conv's channel math.
	"""

	def __init__(self, C_in, C_out, affine=True):
		super(FactorizedReduce, self).__init__()
		assert C_out % 2 == 0
		self.relu = nn.ReLU(inplace=False)
		self.conv_1 = nn.Conv2d(C_in, C_out * 2, 1, stride=2, padding=0, bias=False)
		self.bn = nn.BatchNorm2d(num_features=C_out * 2, affine=affine)

	def forward(self, x):
		return self.relu(self.bn(self.conv_1(x)))


class _Cell_decode(nn.Module):
	def __init__(self, real_nns, matrix_nodes, nodes, _conv, _pool):
		super(_Cell_decode, self).__init__()
		self.result_nns = real_nns
		self.matrix_nodes = matrix_nodes
		self._conv = _conv
		self._pool = _pool
		self.nodes = nodes
		# 扩充通道到16通道
		self.temp_conv = nn.Conv2d(3, 16, kernel_size=1)

		self.my_layers = nn.ModuleDict()

		self.Liner_layer = lambda num: nn.Sequential(
			nn.Flatten(),
			nn.Dropout(0.3),
			nn.ReLU(),
			nn.Linear(num, 2)
		)

		for index, real_nn in enumerate(self.result_nns):
			for i in range(len(real_nn)):
				temp_index = []
				for j in range(len(real_nn[i])):
					if isinstance(real_nn[i][j], nn.Module):
						# 通过index唯一表示一个键
						key = f'layer_{i}_{j}_{index}'
						temp_index.append(key)
						self.my_layers[key] = real_nn[i][j]
	def get_shape(self, x):
		size = x.size()
		num_dims = x.dim()
		if num_dims >= 3:
			height = size[-2]
			width = size[-1]
			return height * width, size[1]
		
	def pad_tensor(self,my_tensor, max_C, max_H, max_W):
		pad_C = max_C - my_tensor.size(1)
		pad_H = max_H - my_tensor.size(2)
		pad_W = max_W - my_tensor.size(3)
		# 在每个维度的两侧填充
		padding = (0, pad_W, 0, pad_H, 0, pad_C)
		return F.pad(my_tensor, padding, mode='constant', value=0)
	# 对数据进行填充
	def input_pad(self,out):
		B_=[]
		C_ =[]
		H_ =[]
		W_ =[]
		padded_tensors = []
		for i, my_tensor in enumerate(out):
			B_.append(my_tensor.size(0))
			C_.append(my_tensor.size(1))
			H_.append(my_tensor.size(2))
			W_.append(my_tensor.size(3))
		B_max = max(B_)
		C_max = max(C_)
		H_max = max(H_)
		W_max = max(W_)

		result  = torch.zeros((B_max, C_max, H_max, W_max))
		for my_tensor in out:
			new_tensor = self.pad_tensor(my_tensor,C_max,H_max,W_max)
			# padded_tensors.append(new_tensor)
			# 不使用cat会导致维度改变 使用直接相加
			result = result+new_tensor
		# 将张量列表沿指定维度进行连接
		# result = torch.cat(padded_tensors, dim=0)
		 
		return result
	
	def get_ture_inchannel(self,data_channel,origin_cahnnel):
		return data_channel if data_channel>=origin_cahnnel else origin_cahnnel
	# 对不同通道数据构造inchan和outchan的神经节点
	def get_new_nn(self,my_model,data_input,flag):
		# 获取模型类
		model_class = type(my_model)
		in_channel = data_input.size(1)
		new_model=None
		propertys = my_model.get_property()
		if flag ==2 :
			# 判断一下通道数大小 如果给定数据通道大于原本通道则使用给定数据通道
			in_channel = self.get_ture_inchannel(in_channel,propertys[0])
			# in_channels, out_channels, kernel_size, padding
			new_model = model_class(in_channels=in_channel,out_channels=in_channel,kernel_size=propertys[1],padding=propertys[2])
		else:
			in_channel = self.get_ture_inchannel(in_channel,propertys[0])
			# in_channels, out_channels, kernel_size, padding
			new_model = model_class(in_channels=in_channel,out_channels=in_channel,kernel_size=propertys[1])
		return new_model
	
	def forward(self, x):
		out = x
		# 提升到16通道
		out = self.temp_conv(out)

		input_1 = []
		# NRNRN的组合 NR NR N 循环
		controls = 3
		for control in range(controls):
			# 前Neraul个为第一个N细胞 后Neraul个为R细胞
			for matrix_index, matrix in enumerate(self.matrix_nodes):
				input_1 = [out, out]
				# N和R细胞交替循环
				for i in range(matrix.shape[0]):
					temp_out = []
					for j in range(matrix.shape[1]):
						key = f'layer_{i}_{j}_{matrix_index}'
						if matrix[i][j] == -1:
							# 控制输出
							temp_out.append(self.my_layers[key](input_1[j]))
						elif matrix[i][j] == 1:
							# 控制输出
							new_model = self.get_new_nn(self.my_layers[key],input_1[j],1)
							temp_out.append(new_model(input_1[j]))
						elif matrix[i][j] == 2:
							# out = torch.cat(temp_out, dim=0)
							out = self.input_pad(temp_out)

							new_model = self.get_new_nn(self.my_layers[key],out,2)

							out = new_model(out)
							input_1.append(out)
							break
				# 得到通道数
				out = input_1[len(input_1) - 1]
				if matrix_index == 1:
					cin_channel = out.size(1)
					fredu = FactorizedReduce(cin_channel, cin_channel)
					out = fredu(out)

				# 	走完最后一个N细胞再break
				if control == controls - 1:
					break

		out = input_1[len(input_1) - 1]
		num, channel = self.get_shape(out)
		out = self.Liner_layer(num * channel)(out)
		return out


