import torch.nn as nn
from torch.autograd import Function
from meta_layers import *
# Hidden width shared by every network in this file (feature dim of Lenet /
# Meta_Lenet outputs and of the discriminator / classifier inputs).
num=50
class revegrad(Function):
    """Gradient reversal layer (GRL).

    Acts as the identity on the forward pass; on the backward pass the
    incoming gradient is multiplied by ``-alpha``, reversing its sign.
    ``alpha`` receives no gradient.
    """

    @staticmethod
    def forward(ctx, x, alpha):
        # Remember the scaling factor for the backward pass.
        ctx.alpha = alpha
        # view_as returns a new tensor object with identical contents.
        return x.view_as(x)

    @staticmethod
    def backward(ctx, grad_output):
        # Flip the sign and scale; None: no gradient w.r.t. alpha.
        reversed_grad = -ctx.alpha * grad_output
        return reversed_grad, None

class Lenet(nn.Module):
    """Small fully-connected feature extractor: 2 -> num -> num -> num,
    with ReLU between the first two layers and after the second."""

    def __init__(self):
        super(Lenet, self).__init__()
        # Layers are kept as named attributes (so they stay individually
        # addressable) and also chained into one Sequential pipeline.
        self.fc1 = nn.Linear(2, num)
        self.relu1 = nn.ReLU()
        self.fc2 = nn.Linear(num, num)
        self.relu2 = nn.ReLU()
        self.fc3 = nn.Linear(num, num)
        pipeline = [self.fc1, self.relu1, self.fc2, self.relu2, self.fc3]
        self.conv = nn.Sequential(*pipeline)

    def forward(self, x):
        return self.conv(x)

class Meta_Lenet(MetaModule):
    """Meta-learning twin of ``Lenet``: same 2 -> num -> num -> num MLP,
    but built from MetaLinear layers (from meta_layers) instead of
    nn.Linear so parameters can be handled by the meta framework."""

    def __init__(self):
        super(Meta_Lenet, self).__init__()
        self.fc1 = MetaLinear(2, num)
        self.relu1 = nn.ReLU()
        self.fc2 = MetaLinear(num, num)
        self.relu2 = nn.ReLU()
        self.fc3 = MetaLinear(num, num)
        pipeline = [self.fc1, self.relu1, self.fc2, self.relu2, self.fc3]
        self.conv = nn.Sequential(*pipeline)

    def forward(self, x):
        return self.conv(x)

class domain(nn.Module):
    """Domain discriminator: gradient reversal followed by three linear
    layers mapping num -> num -> num -> 2 (domain logits).

    NOTE(review): there are no activations between the linear layers, so
    the discriminator is effectively one affine map — matches the sibling
    ``cdan_domain``/``classifier`` style; confirm this is intentional.
    """

    def __init__(self):
        super(domain, self).__init__()
        self.fc1 = nn.Linear(num, num)
        self.fc2 = nn.Linear(num, num)
        self.fc3 = nn.Linear(num, 2)
        self.discriminator = nn.Sequential(self.fc1, self.fc2, self.fc3)

    def forward(self, x, alpha=1):
        # Reverse (and scale by alpha) the gradient flowing back into the
        # feature extractor, then score the features.
        reversed_features = revegrad.apply(x, alpha)
        return self.discriminator(reversed_features)

class cdan_domain(nn.Module):
    """CDAN-style domain discriminator: same as ``domain`` but takes a
    num*2-dimensional input (presumably feature/prediction combination —
    verify against the caller) and maps num*2 -> num -> num -> 2.
    """

    def __init__(self):
        super(cdan_domain, self).__init__()
        self.fc1 = nn.Linear(num * 2, num)
        self.fc2 = nn.Linear(num, num)
        self.fc3 = nn.Linear(num, 2)
        self.discriminator = nn.Sequential(self.fc1, self.fc2, self.fc3)

    def forward(self, x, alpha=1):
        # Gradient reversal first, then the linear discriminator stack.
        reversed_features = revegrad.apply(x, alpha)
        return self.discriminator(reversed_features)

class classifier(nn.Module):
    """Label classifier head: three linear layers num -> num -> num -> 2,
    optionally preceded by a gradient-reversal step."""

    def __init__(self):
        super(classifier, self).__init__()
        self.fc1 = nn.Linear(num, num)
        self.fc2 = nn.Linear(num, num)
        self.fc3 = nn.Linear(num, 2)

    def forward(self, x, is_revegrad=False, alpha=1):
        # Optionally reverse gradients (scaled by alpha) before classifying.
        if is_revegrad:
            x = revegrad.apply(x, alpha)
        return self.fc3(self.fc2(self.fc1(x)))