import torch.nn as nn
import torch

class attCoRA(nn.Module):
	"""Autoencoder with an attention-weighted mixture of class-specific decoders.

	A shared encoder maps inputs to a latent code ``z``. Two reconstruction
	paths exist:

	* ``channel == 1``: a single generic decoder (``decoder1``) reconstructs
	  the input from ``z``.
	* otherwise: a softmax classifier over ``z`` produces per-class scores
	  that act as attention weights over ``out_cls`` class-specific decoders
	  (``decoder2``); the returned reconstruction is the score-weighted
	  combination of their outputs.

	Args:
		in_sz: Dimensionality of the input features.
		hd_sz: Size of the latent code (hidden layers use ``2 * hd_sz``).
		out_cls: Number of classes / class-specific decoders.
	"""

	def __init__(self, in_sz, hd_sz, out_cls):
		super().__init__()
		self.out_cls = out_cls
		self.in_sz = in_sz
		base = hd_sz
		# Shared encoder: in_sz -> 2*base -> base.
		self.encoder = nn.Sequential(
			nn.Linear(in_sz, base * 2),
			nn.ReLU(),
			nn.BatchNorm1d(base * 2),
			nn.Linear(base * 2, base),
			nn.ReLU()
			)
		# Softmax scores over classes; doubles as attention weights in forward().
		self.classifier = nn.Sequential(
			nn.Linear(base, out_cls),
			nn.Softmax(dim=1)
			)
		# Generic decoder: base -> 2*base -> in_sz.
		self.decoder1 = nn.Sequential(
			nn.BatchNorm1d(base),
			nn.Linear(base, base * 2),
			nn.ReLU(),
			nn.BatchNorm1d(base * 2),
			nn.Linear(base * 2, in_sz),
			nn.ReLU()
			)
		# One decoder per class, same architecture as decoder1.
		self.decoder2 = nn.ModuleList([nn.Sequential(
			nn.BatchNorm1d(base),
			nn.Linear(base, base * 2),
			nn.ReLU(),
			nn.BatchNorm1d(base * 2),
			nn.Linear(base * 2, in_sz),
			nn.ReLU()
			) for _ in range(out_cls)])

	def forward(self, x, channel):
		"""Encode ``x`` and reconstruct it through the selected path.

		Args:
			x: Input batch of shape ``(B, in_sz)``.
			channel: ``1`` selects the generic decoder; any other value
				selects the attention-weighted class-specific decoders.

		Returns:
			Tuple ``(reconstruction, z)`` where ``reconstruction`` has shape
			``(B, in_sz)`` and ``z`` has shape ``(B, hd_sz)``.
		"""
		z = self.encoder(x)
		if channel == 1:
			return self.decoder1(z), z
		# Attention-like mixture: classifier scores weight the per-class
		# reconstructions.
		scr = self.classifier(z).unsqueeze(1)                    # (B, 1, out_cls)
		R = torch.stack([dec(z) for dec in self.decoder2], dim=1)  # (B, out_cls, in_sz)
		# BUGFIX: squeeze only the mixture dim (1), never the batch dim.
		# A bare .squeeze() collapsed the batch axis when B == 1, returning
		# shape (in_sz,) instead of (1, in_sz).
		return torch.matmul(scr, R).squeeze(1), z                # (B, in_sz)
