# -*- coding: utf-8 -*-
"""
Created on July 11 11:00:37 2022

@author: JoeJy
"""


import torch.nn as nn
import numpy as np

def calc_coeff(iter_num, high=1.0, low=0.0, alpha=10.0, max_iter=10000.0):
    """Gradient-reversal coefficient schedule (DANN-style warm-up).

    Ramps smoothly from `low` at iter_num=0 toward `high` as
    iter_num -> max_iter, following 2*span/(1+exp(-alpha*t)) - span + low.

    Args:
        iter_num: current training iteration.
        high/low: asymptotic and starting coefficient values.
        alpha: steepness of the sigmoid ramp.
        max_iter: iteration count over which the ramp unfolds.

    Returns:
        float coefficient in [low, high).
    """
    span = high - low
    progress = -alpha * iter_num / max_iter
    return float(2.0 * span / (1.0 + np.exp(progress)) - span + low)

def grl_hook(coeff):
    """Build a backward hook that reverses (negates) and scales gradients.

    Registering the returned function on a tensor makes its upstream
    gradient become -coeff * grad, implementing a gradient-reversal layer.

    Args:
        coeff: scaling factor applied to the negated gradient.

    Returns:
        A callable suitable for `tensor.register_hook`.
    """
    def _reverse(grad):
        # Clone so the incoming gradient buffer is never mutated in place.
        return grad.clone() * (-coeff)
    return _reverse

class BasedCNN(nn.Module):
    """1-D CNN feature extractor with a small linear classifier head.

    Four conv/BN/LeakyReLU/MaxPool stages reduce the input signal, the
    flattened features pass through one 512-unit FC bottleneck, and a
    final linear layer produces 4 class logits.

    forward(data) -> (features, logits)
        data: (batch, 1, L) signal — presumably L = 8192, since the FC
        input is hard-coded to 64*64*8 = 32768 features; verify against caller.
        features: (batch, 512) bottleneck output (also cached on
        `self.myfeature` for external consumers).
        logits: (batch, 4) unnormalized class scores.
    """

    def __init__(self):
        super(BasedCNN, self).__init__()
        # One tuple per conv stage: (in_ch, out_ch, kernel_size, padding).
        # Module names are kept identical to the original layout so that
        # existing checkpoints/state_dicts still load.
        stages = [
            (1, 32, 64, 32),
            (32, 48, 16, 8),
            (48, 64, 5, 2),
            (64, 64, 5, 2),
        ]
        self.convfeature = nn.Sequential()
        for idx, (cin, cout, ksize, pad) in enumerate(stages, start=1):
            self.convfeature.add_module(
                'c_conv%d' % idx,
                nn.Conv1d(cin, cout, kernel_size=ksize, stride=1, padding=pad))
            self.convfeature.add_module('f_bn%d' % idx, nn.BatchNorm1d(cout))
            self.convfeature.add_module('c_relu%d' % idx, nn.LeakyReLU(inplace=True))
            self.convfeature.add_module('c_pool%d' % idx,
                                        nn.MaxPool1d(kernel_size=2, stride=2))

        # FC bottleneck: flattened conv features -> 512-d representation.
        self.fc1 = nn.Sequential()
        self.fc1.add_module('f_fc1', nn.Linear(64 * 64 * 8, 512))
        self.fc1.add_module('f_bn1', nn.BatchNorm1d(512))
        self.fc1.add_module('f_relu1', nn.LeakyReLU(inplace=True))

        # Classifier head: 512 -> 4 logits.
        self.class_classifier = nn.Sequential()
        self.class_classifier.add_module('c_fc', nn.Linear(512, 4))

    def forward(self, data):
        """Run the extractor and classifier; return (features, logits)."""
        conv_out = self.convfeature(data)
        flat = conv_out.view(-1, 64 * 64 * 8)
        feats = self.fc1(flat)
        # Cache the bottleneck features for external access
        # (e.g. feeding a domain discriminator).
        self.myfeature = feats
        return feats, self.class_classifier(feats)
    
        
class AdversarialNetwork(nn.Module):
    """Domain discriminator fronted by a gradient-reversal layer (GRL).

    The forward pass registers a backward hook that multiplies the
    incoming gradient by -coeff, where coeff ramps from `low` (0.0) to
    `high` (1.0) over `max_iter` training iterations via `calc_coeff`.
    Three linear layers with LeakyReLU activations map the input feature
    to a single sigmoid domain probability.

    Args:
        in_feature: dimensionality of the input feature vector.
        hidden_size: width of the two hidden layers.
        max_iter: iteration horizon for the GRL coefficient schedule.
    """

    def __init__(self, in_feature, hidden_size, max_iter):
        super(AdversarialNetwork, self).__init__()
        self.ad_layer1 = nn.Linear(in_feature, hidden_size)
        self.ad_layer2 = nn.Linear(hidden_size, hidden_size)
        self.ad_layer3 = nn.Linear(hidden_size, 1)
        self.relu1 = nn.LeakyReLU()
        self.relu2 = nn.LeakyReLU()
        self.sigmoid = nn.Sigmoid()

        # GRL schedule state: coeff ramps self.low -> self.high as
        # iter_num approaches max_iter (see calc_coeff).
        self.iter_num = 0
        self.alpha = 10
        self.low = 0.0
        self.high = 1.0
        self.max_iter = max_iter

    def forward(self, x):
        """Return the (batch, 1) sigmoid domain probability for `x`."""
        if self.training:
            # Advance the schedule only during training so evaluation
            # passes do not perturb the coefficient ramp.
            self.iter_num += 1
        coeff = calc_coeff(self.iter_num, self.high, self.low,
                           self.alpha, self.max_iter)
        # Multiply by 1.0 to create a fresh graph node the hook can
        # attach to without touching the caller's tensor.
        x = x * 1.0
        # BUGFIX: register_hook raises RuntimeError on tensors that do
        # not require grad (e.g. under torch.no_grad() at eval time), so
        # only attach the gradient-reversal hook when autograd is active.
        if x.requires_grad:
            x.register_hook(grl_hook(coeff))
        x = self.ad_layer1(x)
        x = self.relu1(x)
        x = self.ad_layer2(x)
        x = self.relu2(x)
        y = self.ad_layer3(x)
        y = self.sigmoid(y)
        return y
    
    
    
        