import torch
import torch.nn as nn

class attAE(nn.Module):
	"""Multi-branch autoencoder with additive attention over the branches.

	One encoder/decoder pair is built per output class.  An additive
	(Bahdanau-style) attention head scores each branch's latent code
	against the input: score_i = v^T tanh(W x + V h_i), normalized with a
	softmax across branches.
	"""

	def __init__(self, in_sz, hd_sz, out_cls):
		"""
		Args:
			in_sz: input feature dimension.
			hd_sz: latent (bottleneck) dimension; hidden layers use 2 * hd_sz.
			out_cls: number of encoder/decoder branches (attention slots).
		"""
		super().__init__()
		self.out_cls = out_cls
		self.in_sz = in_sz
		base = hd_sz

		# One encoder per branch: in_sz -> 2*base -> base.
		self.encoder = nn.ModuleList([
			nn.Sequential(
				nn.Linear(in_sz, base * 2),
				nn.ReLU(),
				nn.BatchNorm1d(base * 2),
				nn.Linear(base * 2, base),
				nn.ReLU(),
			)
			for _ in range(out_cls)
		])

		# Matching decoders: base -> 2*base -> in_sz.
		# NOTE(review): the final ReLU constrains reconstructions to be
		# non-negative — presumably inputs are non-negative; confirm.
		self.decoder = nn.ModuleList([
			nn.Sequential(
				nn.BatchNorm1d(base),
				nn.Linear(base, base * 2),
				nn.ReLU(),
				nn.BatchNorm1d(base * 2),
				nn.Linear(base * 2, in_sz),
				nn.ReLU(),
			)
			for _ in range(out_cls)
		])

		# Additive attention parameters: v^T tanh(W x + V h_i).
		self.attW = nn.Linear(in_sz, base * 2)
		self.attV = nn.Linear(base, base * 2)
		self.attTanh = nn.Tanh()
		self.attv = nn.Linear(base * 2, 1, bias=False)
		self.attSoftmax = nn.Softmax(dim=1)

	def forward(self, x):
		"""
		Args:
			x: (batch, in_sz) input batch.

		Returns:
			R: (batch, out_cls, in_sz) per-branch reconstructions.
			A: (batch, out_cls) attention weights (rows sum to 1).
		"""
		# Encode once per branch and reuse the codes for both attention
		# and decoding.  The original ran every encoder twice, doubling
		# the work and — in training mode — updating each BatchNorm1d's
		# running statistics twice per forward pass.
		codes = [enc(x) for enc in self.encoder]

		# attW(x) is branch-invariant: hoist it out of the per-branch loop.
		Wx = self.attW(x)
		scores = torch.cat(
			[self.attv(self.attTanh(Wx + self.attV(h))) for h in codes],
			dim=1,
		)
		A = self.attSoftmax(scores)

		R = torch.stack(
			[dec(h) for dec, h in zip(self.decoder, codes)],
			dim=1,
		)
		return R, A


