import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import random
import time


class CustomBN1D(nn.Module):
    """Manual re-implementation of ``torch.nn.BatchNorm1d``.

    Accepts 2D ``(N, C)`` or 3D ``(N, C, L)`` input and normalizes each of
    the ``C`` channels over every remaining dimension, matching the
    semantics of ``nn.BatchNorm1d`` in both training and eval mode.
    """

    def __init__(self, num_features, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True):
        super().__init__()
        self.num_features = num_features
        self.eps = eps
        self.momentum = momentum
        self.affine = affine
        self.track_running_stats = track_running_stats
        if self.affine:
            # Learnable per-channel scale (gamma) and shift (beta).
            self.weight = nn.Parameter(torch.ones(num_features))
            self.bias = nn.Parameter(torch.zeros(num_features))
        else:
            self.weight = None
            self.bias = None
        # BUG FIX: running mean must start at 0 and running var at 1 (the
        # original had them swapped).  Registered as buffers so they follow
        # ``.to(device)`` and are included in ``state_dict``.
        self.register_buffer('total_batch_mean', torch.zeros(num_features))
        self.register_buffer('total_batch_var', torch.ones(num_features))
        # NOTE: ``self.training`` is already managed by nn.Module.

    def get_mean_var(self, x):
        """Return ``(mean, var, normalized_output)`` for ``x``.

        In training mode (or when running stats are disabled) the
        per-channel statistics come from the current batch; otherwise the
        stored running statistics are used.
        """
        # Shape that broadcasts per-channel (C,) stats against x correctly
        # for both (N, C) and (N, C, L) inputs.  The original eval path
        # broadcast against the LAST axis, normalizing the wrong dimension.
        stat_shape = (1, -1) if x.dim() == 2 else (1, -1, 1)
        if self.training or not self.track_running_stats:
            reduce_dims = (0,) if x.dim() == 2 else (0, 2)
            curr_batch_mean = x.mean(dim=reduce_dims)
            # Biased variance is used for normalization, as in BatchNorm1d.
            curr_batch_var = x.var(dim=reduce_dims, unbiased=False)
        else:
            curr_batch_mean, curr_batch_var = self.total_batch_mean, self.total_batch_var
        mean = curr_batch_mean.view(stat_shape)
        var = curr_batch_var.view(stat_shape)
        x_hat = (x - mean) / torch.sqrt(var + self.eps)
        if self.affine:
            transout = self.weight.view(stat_shape) * x_hat + self.bias.view(stat_shape)
        else:
            # BUG FIX: the original left ``transout`` unbound when
            # affine=False, raising UnboundLocalError on return.
            transout = x_hat
        return curr_batch_mean, curr_batch_var, transout

    def running_update(self, curr_batch_mean, curr_batch_var, sample_count=None):
        """EMA-update the running statistics (training mode only).

        ``sample_count`` (optional, backward-compatible) is the number of
        elements averaged per channel; when provided, the biased batch
        variance is converted to the unbiased estimate before the update,
        matching ``nn.BatchNorm1d``.
        """
        if self.training and self.track_running_stats:
            with torch.no_grad():
                if sample_count is not None and sample_count > 1:
                    curr_batch_var = curr_batch_var * (sample_count / (sample_count - 1))
                # BUG FIX: momentum weights were reversed.  The PyTorch
                # convention is new = (1 - momentum) * old + momentum * batch.
                self.total_batch_mean = (1 - self.momentum) * self.total_batch_mean + self.momentum * curr_batch_mean
                self.total_batch_var = (1 - self.momentum) * self.total_batch_var + self.momentum * curr_batch_var

    def forward(self, x):
        # x shape: (N, C) or (N, C, L)
        if x.dim() not in (2, 3):
            # BUG FIX: was an ``assert`` (stripped under -O) whose message
            # was a non-f-string with a junk placeholder.
            raise ValueError(f"expected 2D or 3D input, got shape {tuple(x.shape)}")
        curr_batch_mean, curr_batch_var, transout = self.get_mean_var(x)
        # Elements averaged per channel, for the unbiased running-var update.
        n = x.numel() // x.shape[1]
        self.running_update(curr_batch_mean, curr_batch_var, sample_count=n)
        return transout
def _demo():
    """Smoke-test: compare CustomBN1D with torch's reference BatchNorm1d.

    The original notebook-style scratch code left bare expressions (no-ops
    in a script) and crashed part-way through when a 4D tensor hit the
    shape check, so the later comparisons never ran.
    """
    torch.manual_seed(0)

    # 3D input: reference vs. custom implementation.
    test_input = torch.rand(2, 2, 2)
    bn1d = nn.BatchNorm1d(2, affine=True, eps=1e-05, momentum=0.1)
    print("reference output:\n", bn1d(test_input))

    my_bn = CustomBN1D(2)
    print("custom output:\n", my_bn(test_input))

    # Shape validation: 4D input must be rejected by CustomBN1D.
    # (AssertionError covers the pre-fix class, ValueError the fixed one.)
    test_input1 = torch.rand(3, 3, 3, 3)
    try:
        CustomBN1D(2)(test_input1)
    except (AssertionError, ValueError) as exc:
        print("CustomBN1D rejected 4D input:", exc)

    # The reference module rejects 4D input as well.
    test_input2 = torch.rand(2, 2, 2, 2)
    try:
        nn.BatchNorm1d(2, affine=True, eps=1e-05, momentum=0.1)(test_input2)
    except (ValueError, RuntimeError) as exc:
        print("BatchNorm1d rejected 4D input:", exc)


if __name__ == "__main__":
    _demo()