from typing import List

import torch
from torch import nn

from .basic_modules import CBA, ReChannel, UBlock


class TDNode(nn.Module):
    """Top-down fusion node of a BiFPN stage.

    Fuses the raw input feature ``xi``, the up-pathway feature ``xu`` (same
    resolution as ``xi``) and the previous output ``xo`` (2x the resolution,
    downsampled here by max-pooling), then applies activation + CBA.

    :param inc1: channel count of ``xi``/``xu`` (and of the output)
    :param inc2: channel count of ``xo``
    :param act_op: activation constructor, e.g. ``nn.ReLU``
    :param fusion: "add" (element-wise sum; assumes inc1 == inc2 — TODO
        confirm against callers) or "cat" (channel concatenation)
    :raises ValueError: if ``fusion`` is neither "add" nor "cat"
    """

    def __init__(self, inc1, inc2, act_op, fusion):
        super().__init__()
        # Fail fast: previously an unknown fusion mode silently skipped
        # creating self.cba and forward() crashed with UnboundLocalError.
        if fusion not in ("add", "cat"):
            raise ValueError(f"fusion must be 'add' or 'cat', got {fusion!r}")
        self.fusion = fusion
        self.down_sample = nn.MaxPool2d(2, 2)
        self.act = act_op(inplace=True)
        if self.fusion == "add":
            self.cba = CBA(inc1, inc1, 3, activation=act_op)
        else:  # "cat": concatenation doubles the input channels of the CBA
            self.cba = CBA(inc1 + inc2, inc1, 3, activation=act_op)

    def forward(self, xi, xo, xu):
        if self.fusion == "add":
            x = xi + xu + self.down_sample(xo)
        else:  # "cat"
            x = torch.cat([xi + xu, self.down_sample(xo)], 1)
        return self.cba(self.act(x))


class BDNode(nn.Module):
    """Bottom-down fusion node of a BiFPN stage.

    Fuses the raw input feature ``xi`` with the previous output ``xo``
    (2x the resolution, downsampled here by max-pooling), then applies
    activation + CBA.

    :param cin1: channel count of ``xi`` (and of the output)
    :param cin2: channel count of ``xo``
    :param act_op: activation constructor, e.g. ``nn.ReLU``
    :param fusion: "add" (element-wise sum; assumes cin1 == cin2 — TODO
        confirm against callers) or "cat" (channel concatenation)
    :raises ValueError: if ``fusion`` is neither "add" nor "cat"
    """

    def __init__(self, cin1, cin2, act_op, fusion):
        super().__init__()
        # Fail fast: previously an unknown fusion mode silently skipped
        # creating self.cba and forward() crashed with UnboundLocalError.
        if fusion not in ("add", "cat"):
            raise ValueError(f"fusion must be 'add' or 'cat', got {fusion!r}")
        self.fusion = fusion
        self.down_sample = nn.MaxPool2d(2, 2)
        self.act = act_op(inplace=True)
        if self.fusion == "add":
            self.cba = CBA(cin1, cin1, 3, activation=act_op)
        else:  # "cat": concatenation widens the input channels of the CBA
            self.cba = CBA(cin1 + cin2, cin1, 3, activation=act_op)

    def forward(self, xi, xo):
        if self.fusion == "add":
            x = xi + self.down_sample(xo)
        else:  # "cat"
            x = torch.cat([xi, self.down_sample(xo)], 1)
        return self.cba(self.act(x))


class BiFPNBlock(nn.Module):
    """One bidirectional fusion stage of a BiFPN.

    Runs a bottom-up ``UBlock`` pass over the pyramid, then walks back down
    through top-down nodes, and finishes with a single bottom-down node.

    :param channels: per-level channel counts of the input pyramid
    :param act_op: activation constructor, e.g. ``nn.ReLU``
    :param fusion: "add" or "cat", forwarded to every sub-module
    """

    def __init__(self, channels, act_op, fusion):
        super().__init__()
        self.fusion = fusion
        self.up = UBlock(channels, act_op, fusion)
        # One top-down node per intermediate pyramid level.
        nodes = []
        for level in range(len(channels) - 2):
            nodes.append(TDNode(channels[level + 1], channels[level], act_op, fusion))
        self.td_nodes = nn.ModuleList(nodes)
        # Final node fusing the deepest input with the last top-down output.
        self.bd_node = BDNode(channels[-1], channels[-2], act_op, fusion)

    def forward(self, x):
        hidden = self.up(x)
        # Start from the deepest up-pathway feature and fuse downwards.
        outs = [hidden[-1]]
        laterals = hidden[-2::-1]
        for node, skip, lateral in zip(self.td_nodes, x[1:], laterals):
            outs.append(node(skip, outs[-1], lateral))
        outs.append(self.bd_node(x[-1], outs[-1]))
        return outs


class BiFPN(nn.Module):
    def __init__(
        self, in_channels, out_channels, repeats, fusion="add", act_op=nn.ReLU, merge=True
    ):
        """
        BiFPN neck.

        :param in_channels: list of channel counts of the input feature maps
        :param out_channels: desired output channels; one of
            1. None/falsy — keep ``in_channels`` unchanged (no ReChannel);
            2. int — ReChannel every level to this count;
            3. list — ReChannel to per-level counts.
        :param repeats: number of stacked ``BiFPNBlock``s
        :param fusion: "add" or "cat" — how features are fused (+ or torch.cat)
        :param act_op: activation constructor, default ``nn.ReLU``
        :param merge: if True, ``forward`` returns only the last feature map;
            otherwise the full list of pyramid outputs.
        :raises TypeError: if ``out_channels`` is none of the accepted kinds.
        """
        super().__init__()
        self.merge = merge
        # "add" fusion only works when every level ends up with the same
        # channel count, which a non-int out_channels does not guarantee;
        # warn early rather than fail deep inside a fusion node.
        if not isinstance(out_channels, int) and fusion == "add":
            print("fusion operation is error, channels  is different")
        if (not out_channels) or (out_channels == in_channels):
            # Channels already match: skip ReChannel entirely.
            out_channels = in_channels
            self.blocks = nn.Sequential(
                *[BiFPNBlock(out_channels, act_op, fusion) for _ in range(repeats)],
                UBlock(out_channels, act_op, fusion),
            )
        elif isinstance(out_channels, int):
            # Broadcast the single count across all pyramid levels.
            out_channels = [out_channels] * len(in_channels)
            self.blocks = nn.Sequential(
                ReChannel(in_channels, out_channels, act_op),
                *[BiFPNBlock(out_channels, act_op, fusion) for _ in range(repeats)],
                UBlock(out_channels, act_op, fusion),
            )
        # isinstance against builtin list, not typing.List (deprecated target).
        elif isinstance(out_channels, list):
            self.blocks = nn.Sequential(
                ReChannel(in_channels, out_channels, act_op),
                *[BiFPNBlock(out_channels, act_op, fusion) for _ in range(repeats)],
                UBlock(out_channels, act_op, fusion),
            )
        else:
            # Previously an unsupported type fell through silently, leaving
            # self.blocks undefined and failing later with AttributeError.
            raise TypeError(
                f"out_channels must be None, int or list, got {type(out_channels).__name__}"
            )

    def forward(self, x):
        if self.merge:
            # Return only the final (merged) feature map.
            return self.blocks(x)[-1]
        else:
            return self.blocks(x)
