import torch
import torch.nn as nn
import torch.nn.functional as F
from layers.Embed import DataEmbedding
from models.TimesNet import Model, TimesBlock

class Neck_V1(nn.Module):
    """Bottleneck "neck": project d_model features down to a small latent
    space and back up.

    Returns both the re-projected features and the latent itself; the
    latent is consumed by the training loss (see Model.custom_loss) to
    regularize the embedding.
    """

    def __init__(self, configs):
        super().__init__()
        d_model = configs.d_model
        # Latent width: overridable via configs.neck_hidden_dims,
        # defaulting to the original hard-coded value of 8.
        hidden_dims = getattr(configs, 'neck_hidden_dims', 8)
        # NOTE: attribute names fc11/fc2 are kept so existing
        # checkpoints' state_dict keys still match.
        self.fc11 = nn.Linear(d_model, hidden_dims, bias=True)
        self.fc2 = nn.Linear(hidden_dims, d_model, bias=True)

    def forward(self, input):
        """Down-project then up-project per time step.

        Args:
            input: tensor of shape [B, T, C] with C == configs.d_model.

        Returns:
            (enc_out, latent): enc_out is [B, T, d_model]; latent is the
            flattened bottleneck activation of shape [B*T, hidden_dims].
        """
        B, T, C = input.shape
        flat = input.reshape(-1, C)        # [B*T, C]

        latent = self.fc11(flat)           # [B*T, hidden_dims]
        out = self.fc2(latent)             # [B*T, d_model]

        enc_out = out.reshape(B, T, -1)    # [B, T, d_model]
        # Historical metrics for this neck variant:
        # Accuracy : 0.9885, Precision : 0.8843, Recall : 0.8311, F-score : 0.8569, AUC: 0.7556
        return enc_out, latent

class Model(Model):
    """TimesNet-based anomaly-detection model with a bottleneck neck.

    Extends the base TimesNet ``Model`` by inserting a ``Neck_V1``
    bottleneck between the embedding and the TimesBlock stack. During
    training the neck's latent is returned alongside the reconstruction
    so ``custom_loss`` can apply a proximity penalty to it.
    """

    def __init__(self, configs):
        super().__init__(configs)
        # Re-create the embedding (same signature as the base class) so the
        # subclass owns it, then attach the bottleneck neck.
        self.enc_embedding = DataEmbedding(configs.enc_in, configs.d_model, configs.embed, configs.freq,
                                           configs.dropout)
        self.neck = Neck_V1(configs)

    def forward(self, x_enc, x_mark_enc, x_dec, x_mark_dec, mask=None):
        """Dispatch on task/post-process mode.

        Training returns a dict {"dec_out", "val"}; inference returns
        either the reconstruction alone (anomaly_v1) or the pair
        (reconstruction, latent) (anomaly_v2). Unsupported combinations
        return None.
        """
        if self.task_name == 'anomaly_detection':
            # NOTE(review): both modes call anomaly_detection_v2; the 'anomaly_v1'
            # branch may have been intended to call a v1 method — confirm upstream.
            if self.post_process == 'anomaly_v1':
                dec_out, val = self.anomaly_detection_v2(x_enc)
                if self.training:
                    return {"dec_out": dec_out, "val": val}
                # Inference: single output, the decoded reconstruction.
                return dec_out  # [B, L, D]
            elif self.post_process == 'anomaly_v2':
                dec_out, val = self.anomaly_detection_v2(x_enc)
                if self.training:
                    return {"dec_out": dec_out, "val": val}  # dec_out [B, L, D]
                # Inference: dual output — decoded reconstruction plus neck latent.
                return dec_out, val  # [B, L, D]
        return None

    def anomaly_detection_v2(self, x_enc):
        """Reconstruct x_enc via embed -> neck -> TimesBlocks -> projection.

        Returns:
            (dec_out, val): de-normalized reconstruction [B, L, D] and the
            neck latent (shape [B*L, hidden] per Neck_V1.forward).
        """
        # Normalization from Non-stationary Transformer
        means = x_enc.mean(1, keepdim=True).detach()
        x_enc = x_enc - means
        stdev = torch.sqrt(
            torch.var(x_enc, dim=1, keepdim=True, unbiased=False) + 1e-5)
        x_enc /= stdev

        # Embedding followed by the bottleneck neck
        x = self.enc_embedding(x_enc, None)  # [B,T,C]
        enc_out, val = self.neck(x)

        # TimesNet block stack
        for i in range(self.layer):
            enc_out = self.layer_norm(self.model[i](enc_out))
        # Project back to the input feature dimension
        dec_out = self.projection(enc_out)

        # De-Normalization from Non-stationary Transformer
        dec_out = dec_out * \
                  (stdev[:, 0, :].unsqueeze(1).repeat(
                      1, self.pred_len + self.seq_len, 1))
        dec_out = dec_out + \
                  (means[:, 0, :].unsqueeze(1).repeat(
                      1, self.pred_len + self.seq_len, 1))

        return dec_out, val

    # Observed metrics (kept for reference):
    # Accuracy : 0.9885, Precision : 0.8844, Recall : 0.8321, F-score : 0.8574, AUC: 0.7548 anomaly_detection
    # Accuracy : 0.9890, Precision : 0.8335, Recall : 0.9196, F-score : 0.8744, AUC: 0.7548 anomaly_detection_v2
    @staticmethod
    def custom_loss(input, target):
        """Reconstruction loss.

        A plain tensor input gets pure MSE (inference-style call). The
        training-time dict {"dec_out", "val"} additionally gets a
        proximity penalty that pushes neck latents away from the origin
        (latents with L2 norm <= 0.5 incur a very large 1/eps penalty).
        """
        if isinstance(input, torch.Tensor):
            return F.mse_loss(input, target)

        w0 = 0.1  # weight of the proximity term
        dec_out = input["dec_out"]
        enc = input["val"]

        # Distance of each latent beyond a radius-0.5 ball; reciprocal is
        # huge for latents inside the ball, small for those well outside.
        distances = torch.norm(enc, p=2, dim=1)
        distances = torch.relu(distances - 0.5)
        proximity_loss = torch.mean(1.0 / (distances + 1e-6))

        return F.mse_loss(dec_out, target) + w0 * proximity_loss
