import torch
from timm.models.efficientnet import tf_efficientnet_b0_ns
from torch import nn
import torch.nn.functional as F

from timm.models.xception import Block
from torchvision import models

__all__ = ['hierarchical_fusion']

from training.models import vit_base_patch16_224


class ThriceConv(nn.Module):
    """Halve spatial resolution with 2x2 max-pooling, then apply three
    3x3 conv -> BatchNorm -> ReLU stages (padding preserves H and W)."""

    def __init__(self, in_channels, out_channels):
        super().__init__()

        def _stage(cin, cout):
            # One 3x3 conv followed by batch-norm and in-place ReLU.
            return [
                nn.Conv2d(cin, cout, kernel_size=3, padding=1),
                nn.BatchNorm2d(cout),
                nn.ReLU(inplace=True),
            ]

        layers = [nn.MaxPool2d(2)]
        layers += _stage(in_channels, out_channels)
        layers += _stage(out_channels, out_channels)
        layers += _stage(out_channels, out_channels)
        self.thrice_conv = nn.Sequential(*layers)

    def forward(self, x):
        """Return the pooled, thrice-convolved feature map."""
        return self.thrice_conv(x)

class TwiceConv_Max(nn.Module):
    """Halve spatial resolution with 2x2 max-pooling, then apply two
    3x3 conv -> BatchNorm -> ReLU stages (padding preserves H and W)."""

    def __init__(self, in_channels, out_channels):
        super().__init__()

        def _stage(cin, cout):
            # One 3x3 conv followed by batch-norm and in-place ReLU.
            return (
                nn.Conv2d(cin, cout, kernel_size=3, padding=1),
                nn.BatchNorm2d(cout),
                nn.ReLU(inplace=True),
            )

        self.twice_conv_max = nn.Sequential(
            nn.MaxPool2d(2),
            *_stage(in_channels, out_channels),
            *_stage(out_channels, out_channels),
        )

    def forward(self, x):
        """Return the pooled, twice-convolved feature map."""
        return self.twice_conv_max(x)

class TwiceConv(nn.Module):
    """Two consecutive 3x3 conv -> BatchNorm -> ReLU stages at full
    resolution (no pooling; padding preserves H and W)."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        stages = []
        for cin, cout in ((in_channels, out_channels), (out_channels, out_channels)):
            stages.extend([
                nn.Conv2d(cin, cout, kernel_size=3, padding=1),
                nn.BatchNorm2d(cout),
                nn.ReLU(inplace=True),
            ])
        self.twice_conv = nn.Sequential(*stages)

    def forward(self, x):
        """Return the twice-convolved feature map (spatial size unchanged)."""
        return self.twice_conv(x)

class OutConv(nn.Module):
    """Pointwise (1x1) convolution used as a channel-projection head."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        # kernel_size=1: pure channel mixing; spatial size is unchanged.
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1)

    def forward(self, x):
        """Project `x` from `in_channels` to `out_channels`."""
        return self.conv(x)

class DeConv(nn.Module):
    """Learned 2x spatial upsampling via transposed convolution."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        # kernel_size=2 with stride=2 exactly doubles H and W.
        self.deconv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=2, stride=2)

    def forward(self, x):
        """Upsample `x` by a factor of 2 in both spatial dimensions."""
        return self.deconv(x)

class SiblingBranch(nn.Module):
    """Weight-sharing (siamese) dual encoder with a U-Net-style fusion decoder.

    Both inputs are encoded by the SAME conv blocks (shared weights, see
    forward). The decoder then walks back up the scale pyramid: at each scale
    it upsamples the fused map and concatenates the matching left and right
    encoder features before a 1x1 channel projection.

    Args:
        in_channels: channel count of each input image/tensor.
        n_classes: channel count of each regression output.
    """

    def __init__(self, in_channels, n_classes):
        super(SiblingBranch, self).__init__()

        # Shared encoder (each module is applied to BOTH inputs in forward).
        self.Conv1 = TwiceConv(in_channels, 64)   # full resolution
        self.Conv2 = TwiceConv_Max(64, 128)       # 1/2
        self.Conv3 = ThriceConv(128, 256)         # 1/4
        self.Conv4 = ThriceConv(256, 512)         # 1/8
        self.Conv5 = ThriceConv(512, 512)         # 1/16

        # Decoder: 2x learned upsampling between fusion stages.
        self.DeConv1 = DeConv(512, 512)
        self.DeConv2 = DeConv(512, 256)
        self.DeConv3 = DeConv(256, 128)
        self.DeConv4 = DeConv(128, 64)

        # 1x1 projections applied after channel-concatenation.
        self.OutConv_F1 = OutConv(1024, 512)  # L5 + R5 = 512 + 512
        self.OutConv_F2 = OutConv(1536, 512)  # up(F1) + L4 + R4 = 512 * 3
        self.OutConv_F3 = OutConv(768, 256)   # up(F2) + L3 + R3 = 256 * 3
        self.OutConv_F4 = OutConv(384, 128)   # up(F3) + L2 + R2 = 128 * 3
        self.OutConv_F5 = OutConv(192, 64)    # up(F4) + L1 + R1 = 64 * 3

        self.regression = OutConv(512, n_classes)        # per-branch head (shared)
        self.regression_fusing = OutConv(64, n_classes)  # fused full-res head

    def forward(self, x1, x2):
        """Encode both inputs with shared weights and fuse them.

        Returns:
            Tuple ``(regression_left, regression_right, regression_fusing)``:
            the two per-branch regressions (at 1/16 resolution) and the
            full-resolution fused regression.
        """
        # Left branch.
        L1 = self.Conv1(x1)
        L2 = self.Conv2(L1)
        L3 = self.Conv3(L2)
        L4 = self.Conv4(L3)
        L5 = self.Conv5(L4)
        regression_left = self.regression(L5)

        # Right branch — same modules, so the weights are shared.
        R1 = self.Conv1(x2)
        R2 = self.Conv2(R1)
        R3 = self.Conv3(R2)
        R4 = self.Conv4(R3)
        R5 = self.Conv5(R4)
        regression_right = self.regression(R5)

        # Fusion decoder: start at the deepest scale and move up,
        # concatenating (upsampled fused, left, right) along channels.
        F1 = self.OutConv_F1(torch.cat((L5, R5), 1))
        F2 = self.OutConv_F2(torch.cat((self.DeConv1(F1), L4, R4), 1))
        F3 = self.OutConv_F3(torch.cat((self.DeConv2(F2), L3, R3), 1))
        F4 = self.OutConv_F4(torch.cat((self.DeConv3(F3), L2, R2), 1))
        F5 = self.OutConv_F5(torch.cat((self.DeConv4(F4), L1, R1), 1))
        regression_fusing = self.regression_fusing(F5)

        return regression_left, regression_right, regression_fusing

class HierarchicalFusion(nn.Module):
    """Two-stage model: a siamese fusion branch feeding a ViT classifier.

    The sibling branch fuses the two inputs into a single 3-channel,
    full-resolution map; only that fused map is passed to the backbone.
    The per-branch regressions are discarded here.

    Args:
        in_chans: channel count of each of the two inputs.
        num_classes: class count forwarded to the ViT classification head.
        pretrained: forwarded to the backbone factory.

    NOTE(review): the sibling branch's output is hard-coded to 3 channels —
    presumably so the fused map can be consumed like an RGB image by the
    224-input ViT backbone; confirm against training code.
    """

    def __init__(self, in_chans, num_classes, pretrained=False):
        super(HierarchicalFusion, self).__init__()
        self.sibling_branch = SiblingBranch(in_chans, 3)
        self.features = vit_base_patch16_224(pretrained=pretrained, num_classes=num_classes)

    def forward(self, x1, x2):
        """Fuse `x1` and `x2` and classify the fused map.

        Returns the backbone's output for the fused map only.
        """
        # Per-branch regressions are intentionally unused at this stage.
        _, _, fused = self.sibling_branch(x1, x2)
        return self.features(fused)


def hierarchical_fusion(in_chans=3, pretrained=False):
    """Factory for a HierarchicalFusion model.

    NOTE(review): currently a stub — the construction below is commented out
    and the function returns None even though it is exported via __all__.
    The commented `default_cfg` line reads `model.sibling_branch.default_cfg`,
    an attribute SiblingBranch does not define, so it would raise if simply
    re-enabled. Confirm the intended wiring before restoring.
    """
    # model = HierarchicalFusion(in_chans=in_chans, num_classes=1000, pretrained=pretrained)
    # model.default_cfg = model.sibling_branch.default_cfg
    # return model
    pass


if __name__ == "__main__":
    # Smoke test: push two random 224x224 RGB batches through the model.
    net = HierarchicalFusion(in_chans=3, num_classes=2, pretrained=True)
    left = torch.randn(4, 3, 224, 224)
    right = torch.randn(4, 3, 224, 224)
    prediction = net(left, right)



