from torch import nn
import torch


class _DomainSpecificBatchNorm(nn.Module):
    _version = 2

    def __init__(self, num_features, num_classes, eps=1e-5, momentum=0.1, affine=True,
                 track_running_stats=True):
        super(_DomainSpecificBatchNorm, self).__init__()
        #         self.bns = nn.ModuleList([nn.modules.batchnorm._BatchNorm(num_features, eps, momentum, affine, track_running_stats) for _ in range(num_classes)])
        num_classes = 2
        self.bns = nn.ModuleList([nn.BatchNorm2d(num_features) for _ in range(num_classes)])
        # self.bns =  nn.ModuleList([nn.BatchNorm2d(num_features)])  # , eps, momentum, affine, track_running_stats)

    def reset_running_stats(self):
        for bn in self.bns:
            bn.reset_running_stats()

    def reset_parameters(self):
        for bn in self.bns:
            bn.reset_parameters()

    def _check_input_dim(self, input):
        raise NotImplementedError

    def forward(self, x, N):
        self._check_input_dim(x)
        # # bn = self.bns[0]  # domain_label[0]]
        # # return bn(x), domain_label
        # x0 = x[:N]
        # x1 = x[N:]
        # x0 = self.bns[0](x0)
        # x1 = self.bns[1](x1)
        # return torch.cat((x0, x1)), N
        # return self.bns[0](x), N
        return 0.5 * (self.bns[0](x) + self.bns[1](x)), N

class DomainSpecificBatchNorm2d(_DomainSpecificBatchNorm):
    """Domain-specific batch norm for 4D ``(N, C, H, W)`` inputs."""

    def _check_input_dim(self, input):
        # BatchNorm2d operates on 4D tensors only; reject anything else.
        ndim = input.dim()
        if ndim != 4:
            raise ValueError(f'expected 4D input (got {ndim}D input)')