import torch.nn as nn
import torch

def get_conv2d(c1, c2, k, s, p, d, g, bias=False):
    """Build a plain ``nn.Conv2d`` layer.

    Args:
        c1: number of input channels.
        c2: number of output channels.
        k: kernel size.
        s: stride.
        p: padding.
        d: dilation.
        g: number of groups.
        bias: whether the convolution carries a learnable bias (default False).

    Returns:
        A configured ``nn.Conv2d`` module.
    """
    return nn.Conv2d(
        in_channels=c1,
        out_channels=c2,
        kernel_size=k,
        stride=s,
        padding=p,
        dilation=d,
        groups=g,
        bias=bias,
    )

def get_activation(act_type=None):
    """Return an activation module for the given name.

    Args:
        act_type: ``'relu'``, ``'lrelu'`` (LeakyReLU with negative slope 0.1),
            or ``None`` for no activation.

    Returns:
        An ``nn.Module``. ``None`` yields ``nn.Identity()`` (a no-op) —
        previously the declared default fell straight into the error branch,
        which made the default value unusable.

    Raises:
        NotImplementedError: for any unrecognized activation name.
    """
    if act_type is None:
        # "No activation" is the natural meaning of the default; callers that
        # pass a falsy act_type already expect activation to be skipped.
        return nn.Identity()
    if act_type == "relu":
        return nn.ReLU(inplace=True)
    if act_type == "lrelu":
        return nn.LeakyReLU(0.1, inplace=True)
    raise NotImplementedError("Activation {} not implemented".format(act_type))

def get_norm(norm_type, dim):
    """Return a normalization layer for the given name.

    Args:
        norm_type: normalization identifier; only ``'bn'`` (BatchNorm2d)
            is supported.
        dim: number of feature channels to normalize.

    Returns:
        An ``nn.BatchNorm2d`` module over ``dim`` channels.

    Raises:
        NotImplementedError: for any unsupported normalization name.
    """
    if norm_type != 'bn':
        raise NotImplementedError("normalization {} not implemented".format(norm_type))
    return nn.BatchNorm2d(dim)
    
class Conv(nn.Module):
    """Standard conv block: Conv2d -> (optional) norm -> (optional) activation.

    Args:
        c1: number of input channels.
        c2: number of output channels.
        k: kernel size (default 1).
        s: stride (default 1).
        p: padding (default 0).
        d: dilation (default 1).
        act_type: activation name passed to ``get_activation``; a falsy value
            disables the activation layer (default ``'lrelu'``).
        norm_type: normalization name passed to ``get_norm``; a falsy value
            disables the normalization layer (default ``'bn'``).
        g: conv groups (default 1). Added as a trailing keyword so existing
            positional callers are unaffected.
    """

    def __init__(self,
                 c1,
                 c2,
                 k=1,
                 s=1,
                 p=0,
                 d=1,
                 act_type='lrelu',
                 norm_type='bn',
                 g=1
                 ):
        super(Conv, self).__init__()
        layers = []
        # A bias term is redundant when a norm layer follows (BatchNorm has
        # its own learnable shift), so only enable it when norm is disabled.
        add_bias = not norm_type

        layers.append(get_conv2d(c1, c2, k, s, p, d, g, add_bias))
        if norm_type:
            layers.append(get_norm(norm_type, c2))
        if act_type:
            layers.append(get_activation(act_type))
        self.convs = nn.Sequential(*layers)

    def forward(self, x):
        """Apply the conv -> norm -> activation stack to ``x``."""
        return self.convs(x)

