import torch
import torch.nn as nn
import torch.nn.functional as F


class TCNBlock_spk(nn.Module):
    """Speaker-conditioned TCN block.

    A speaker embedding ``aux`` of shape (B, spk_emb_dim, 1) is repeated
    along time, concatenated with the input, then passed through a
    1x1-conv -> depthwise-conv -> 1x1-conv bottleneck with a residual
    connection back to the input.

    Args:
        in_channle: number of input (and output) channels.
        spk_emb_dim: speaker-embedding dimension.
        conv_channel: bottleneck channel count.
        kernel_size: depthwise-conv kernel size.
        dilation: depthwise-conv dilation.
        casual: if True, use causal padding (no future frames leak into
            the output). Parameter name kept for backward compatibility;
            "causal" is the intended word.
    """

    def __init__(
        self,
        in_channle,
        spk_emb_dim,
        conv_channel,
        kernel_size=3,
        dilation=1,
        casual=False,
    ):
        super().__init__()
        self.conv1x1 = nn.Conv1d(in_channle + spk_emb_dim, conv_channel, 1)
        self.prelu1 = nn.PReLU()
        # NOTE(review): GroupNorm(1, C) normalizes over channels AND time,
        # so it is not strictly streamable in causal mode; a cumulative
        # layer norm would be needed for true frame-by-frame causality.
        self.norm1 = nn.GroupNorm(1, conv_channel)
        if casual:
            # Full left padding; the right-side overhang produced by the
            # conv's symmetric padding is trimmed in forward().
            dconv_pad = dilation * (kernel_size - 1)
        else:
            # Symmetric "same" padding.
            dconv_pad = (dilation * (kernel_size - 1)) // 2
        self.dconv = nn.Conv1d(
            conv_channel,
            conv_channel,
            kernel_size,
            groups=conv_channel,
            padding=dconv_pad,
            dilation=dilation,
            bias=True,
        )
        self.prelu2 = nn.PReLU()
        self.norm2 = nn.GroupNorm(1, conv_channel)
        self.sconv = nn.Conv1d(conv_channel, in_channle, 1, bias=True)
        self.casual = casual
        self.dconv_pad = dconv_pad

    def forward(self, x, aux):
        """x: (B, in_channle, T); aux: (B, spk_emb_dim, 1) -> (B, in_channle, T)."""
        # Broadcast the speaker embedding across every time step.
        aux1 = aux.repeat(1, 1, x.shape[-1])
        y = torch.cat([x, aux1], 1)
        y = self.conv1x1(y)
        y = self.norm1(self.prelu1(y))
        y = self.dconv(y)
        if self.casual and self.dconv_pad > 0:
            # Drop frames produced by right-side padding so each output
            # frame depends only on current and past inputs.
            y = y[:, :, : -self.dconv_pad]
        y = self.norm2(self.prelu2(y))
        y = self.sconv(y)
        y += x
        return y


class TCNBlock(nn.Module):
    """Standard TCN residual block.

    1x1-conv -> PReLU/GroupNorm -> depthwise dilated conv ->
    PReLU/GroupNorm -> 1x1-conv, with a residual connection back to
    the input. Output shape equals input shape.

    Args:
        in_channel: number of input (and output) channels.
        conv_channel: bottleneck channel count.
        kernel_size: depthwise-conv kernel size.
        dilation: depthwise-conv dilation.
        causal: if True, use causal padding so no future frames leak
            into the output.
    """

    def __init__(
        self,
        in_channel=256,
        conv_channel=512,
        kernel_size=3,
        dilation=1,
        causal=False,
    ):
        super().__init__()
        self.conv1x1 = nn.Conv1d(in_channel, conv_channel, 1)
        self.prelu1 = nn.PReLU()
        # NOTE(review): GroupNorm(1, C) normalizes over channels AND time,
        # so it is not strictly streamable in causal mode; a cumulative
        # layer norm would be needed for true frame-by-frame causality.
        self.norm1 = nn.GroupNorm(1, conv_channel)
        if causal:
            # Full left padding; the right-side overhang produced by the
            # conv's symmetric padding is trimmed in forward().
            dconv_pad = dilation * (kernel_size - 1)
        else:
            # Symmetric "same" padding.
            dconv_pad = dilation * (kernel_size - 1) // 2
        self.dconv = nn.Conv1d(
            conv_channel,
            conv_channel,
            kernel_size,
            groups=conv_channel,
            padding=dconv_pad,
            dilation=dilation,
            bias=True,
        )
        self.prelu2 = nn.PReLU()
        self.norm2 = nn.GroupNorm(1, conv_channel)
        self.sconv = nn.Conv1d(conv_channel, in_channel, 1, bias=True)
        self.causal = causal
        self.dconv_pad = dconv_pad

    def forward(self, x):
        """x: (B, in_channel, T) -> (B, in_channel, T)."""
        y = self.conv1x1(x)
        y = self.norm1(self.prelu1(y))
        y = self.dconv(y)
        if self.causal and self.dconv_pad > 0:
            # Drop frames produced by right-side padding so each output
            # frame depends only on current and past inputs.
            y = y[:, :, : -self.dconv_pad]
        y = self.norm2(self.prelu2(y))
        y = self.sconv(y)
        y += x
        return y


class TCNStack:
    """Factory for a stack of TCNBlocks with exponentially growing dilation."""

    @staticmethod
    def build_TCNStack(num_blocks, in_channels, conv_channels, kernel_size, causal):
        """Return an nn.Sequential of ``num_blocks`` TCNBlocks.

        Block ``i`` uses dilation ``2**i`` (i = 0 .. num_blocks - 1), so the
        receptive field grows exponentially with depth.

        NOTE(review): the original used ``range(1, num_blocks)``, which
        built only ``num_blocks - 1`` blocks and skipped the dilation-1
        block; ``range(num_blocks)`` matches the conventional TCN stack.
        The missing ``@staticmethod`` also meant an instance call would
        silently bind the instance to ``num_blocks``.
        """
        return nn.Sequential(
            *[
                TCNBlock(in_channels, conv_channels, kernel_size, 2**i, causal)
                for i in range(num_blocks)
            ]
        )


class ResNet(nn.Module):
    """Pointwise-conv residual block followed by temporal max-pooling.

    conv1x1 -> BN -> PReLU -> conv1x1 -> BN, plus a (projected) residual
    shortcut, then PReLU and MaxPool1d(3) over time.

    Args:
        in_dim: input channel count.
        out_dim: output channel count.
    """

    def __init__(self, in_dim, out_dim):
        super().__init__()
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.conv1 = nn.Conv1d(in_dim, out_dim, kernel_size=1, bias=False)
        self.conv2 = nn.Conv1d(out_dim, out_dim, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm1d(out_dim)
        self.bn2 = nn.BatchNorm1d(out_dim)
        self.prelu1 = nn.PReLU()
        self.prelu2 = nn.PReLU()
        self.maxpool = nn.MaxPool1d(3)
        # Project the residual whenever the channel counts differ. The
        # original tested `in_dim < out_dim`, which crashed at runtime for
        # in_dim > out_dim because the identity shortcut could not be added
        # to a y with fewer channels.
        self.downsample = in_dim != out_dim
        if self.downsample:
            self.conv_downsample = nn.Conv1d(
                in_dim, out_dim, kernel_size=1, bias=False
            )

    def forward(self, x):
        """x: (B, in_dim, T) -> (B, out_dim, T // 3)."""
        y = self.conv1(x)
        y = self.bn1(y)
        y = self.prelu1(y)
        y = self.conv2(y)
        y = self.bn2(y)
        if self.downsample:
            y += self.conv_downsample(x)
        else:
            y += x
        y = self.prelu2(y)
        y = self.maxpool(y)
        return y
