import torch
from QfUtil.NetworkStructure.standard_net import StandardNetwork, MyConv, MyMLP, Concat
from torch.nn import GRU, Flatten, Identity


class VisualRoute(StandardNetwork):
    """Convolutional feature extractor for the visual (image) input stream.

    Six MyConv stages followed by a dropout MLP head, registered under the
    exact names ('input layer', 'layer 1' ... 'layer 6') that the rest of
    the file looks up (e.g. AuditoryRoute replaces 'input layer').
    """

    def __init__(self):
        super().__init__()
        # (registered name, MyConv keyword arguments), in registration order.
        conv_stack = (
            ('input layer', dict(in_channels=1, out_channels=4)),
            ('layer 1', dict(in_channels=4, out_channels=10)),
            ('layer 2', dict(in_channels=10, out_channels=15)),
            ('layer 3', dict(in_channels=15, out_channels=30, stride=2)),
            ('layer 4', dict(in_channels=30, out_channels=30, stride=2)),
            ('layer 5', dict(in_channels=30, out_channels=10)),
        )
        for name, conv_kwargs in conv_stack:
            self.add_module(name, MyConv(**conv_kwargs))
        # Classifier head: the 10 * 7 * 7 input assumes the conv stack leaves
        # a 10-channel 7x7 map (28x28 input downsampled twice) -- TODO confirm.
        self.add_module('layer 6', MyMLP(10 * 7 * 7, 7 * 7, 10, drop_rate=0.5))


class AuditoryRoute(VisualRoute):
    """VisualRoute variant whose first conv accepts 4-channel input
    (presumably a spectrogram-like auditory tensor -- TODO confirm)."""

    def __init__(self):
        super(AuditoryRoute, self).__init__()
        # StandardNetwork apparently keeps a module -> name mapping in
        # self.module_names; remove the entry keyed by the old visual input
        # conv BEFORE replacing it, so no stale key lingers in the mapping.
        del self.module_names[self._modules['input layer']]
        # Swap in a conv taking 4 input channels that downsamples aggressively
        # (kernel_size=9, padding=0, stride=2), presumably so the auditory
        # tensor reaches the same spatial size as the visual path -- verify.
        self._modules['input layer'] = MyConv(
            in_channels=4, out_channels=4, kernel_size=9, padding=0, stride=2
        )
        # Re-register the replacement module under the same display name.
        self.module_names[self._modules['input layer']] = 'input layer'


# Per-step input size fed to the cortex for each tap depth (the depth-th
# from-last route output is tapped -- see Brain.forward).  Depth 1 taps the
# two routes' 10-way MLP outputs joined on dim 1 (10 + 10 = 20); for deeper
# taps the conv channels become the sequence dimension and each step is the
# two routes' spatial maps joined (H * W * 2) -- assumption from Cortex's
# Concat/Flatten dims; TODO confirm against MyConv output shapes.
size_dict = {
    1: 20,
    2: 7 * 7 * 2,
    3: 14 * 14 * 2,
    4: 28 * 28 * 2,
}

from QfUtil.Basic.torch_util import cat
class MyGRU(GRU):
    """GRU that adapts route features into a batch-first sequence.

    A 2-D input (batch, features) is tiled into a 30-step sequence; any
    higher-rank input is doubled along dim 1 by concatenating zeros of the
    same shape (the channel dimension then serves as the sequence).

    Fix vs. the original: ``torch.nn.GRU.forward`` returns
    ``(output_sequence, final_hidden_state)``, but the original unpacked
    them as ``hidden, output`` -- i.e. the names were swapped, obscuring
    that the buffer holds the full output sequence and the return value is
    the final hidden state.  Behavior is unchanged; only the misleading
    names and the non-idiomatic rank check were corrected.
    """

    def __init__(self, batch_first=True, hidden_size=200, input_size=size_dict[1]):
        super(MyGRU, self).__init__(
            batch_first=batch_first,
            hidden_size=hidden_size,
            input_size=input_size,
            bias=False
        )
        # Full per-step output sequence (batch, seq, hidden) from the last
        # forward(); read by Cortex.analyse as 'gru hidden'.
        self.hidden_buffer = None

    def forward(self, din, **kwargs):
        if din.dim() == 2:
            # (batch, features) -> (batch, 30, features): repeat the vector
            # as a fixed-length sequence.  NOTE(review): 30 is hard-coded,
            # presumably the intended number of recurrent steps -- confirm.
            din = din.unsqueeze(1).repeat(1, 30, 1)
        else:
            # Double the sequence dimension by zero-padding its tail.
            din = cat(din, torch.zeros_like(din), dim=1)
        seq_output, final_hidden = super().forward(din, **kwargs)
        # Buffer the whole output sequence for later inspection.
        self.hidden_buffer = seq_output
        # Return the final hidden state permuted to (batch, num_layers, hidden).
        return final_hidden.permute(1, 0, 2)


class Cortex(StandardNetwork):
    """Fuses the two route outputs and classifies them with a GRU + MLP head.

    depth selects which route layer is tapped (see size_dict): depth 1 joins
    vector outputs on dim 1; any deeper tap joins conv maps on dim 2.
    """

    def __init__(self, depth=1, node_size=500):
        super().__init__()
        # Vector taps (depth 1) concatenate on dim 1, map taps on dim 2.
        dim = 1 if depth == 1 else 2
        self.add_module('concat', Concat(dim=dim))
        self.add_module('flatten', Flatten(start_dim=dim))
        self.add_module(
            'cortex',
            MyGRU(batch_first=True, hidden_size=node_size, input_size=size_dict[depth]),
        )
        self.add_module('output', MyMLP(node_size, 3, 10, drop_rate=0, stimulator=Identity()))

    def analyse(self, din):
        """Run StandardNetwork.analyse, then attach the GRU's buffered
        output sequence and a reference to this network."""
        result = super().analyse(din)
        result['gru hidden'] = self._modules['cortex'].hidden_buffer
        result['self'] = self
        return result


class Brain(StandardNetwork):
    """Two sensory routes (visual + auditory) feeding a shared Cortex.

    Fix vs. the original: ``visual_route=VisualRoute()`` and
    ``auditory_route=AuditoryRoute()`` were mutable default arguments,
    evaluated once at definition time -- every Brain built with defaults
    shared the SAME route instances (and therefore the same weights).
    None sentinels give each Brain its own fresh routes; callers that pass
    routes explicitly are unaffected.
    """

    def __init__(self, visual_route=None, auditory_route=None, depth=1, node_size=500):
        """
        :param visual_route: image route; a fresh VisualRoute if None
        :param auditory_route: audio route; a fresh AuditoryRoute if None
        :param depth: which from-last route output feeds the cortex
        :param node_size: GRU hidden size of the cortex
        """
        super().__init__()
        self.visual_route = visual_route if visual_route is not None else VisualRoute()
        self.auditory_route = auditory_route if auditory_route is not None else AuditoryRoute()
        self.cortex = Cortex(depth=depth, node_size=node_size)
        self.add_module('visual route', self.visual_route)
        self.add_module('auditory route', self.auditory_route)
        self.add_module('cortex', self.cortex)
        self.depth = depth
        self.node_size = node_size

    def forward(self, din):
        """din is a (visual_input, auditory_input) pair; each route is run
        through analyse() and its depth-th-from-last intermediate output is
        fed to the cortex."""
        vdin, adin = din
        vr = list(self.visual_route.analyse(vdin).values())[-self.depth]
        ar = list(self.auditory_route.analyse(adin).values())[-self.depth]
        return self.cortex((vr, ar))

    def analyse(self, din):
        """Return the full per-layer analyses of both routes and the cortex.

        :param din: (visual_input, auditory_input) pair
        :return: (vr, ar, cr) analyse dicts
        """
        vdin, adin = din
        vr = self.visual_route.analyse(vdin)
        ar = self.auditory_route.analyse(adin)
        cr = self.cortex.analyse((list(vr.values())[-self.depth], list(ar.values())[-self.depth]))
        return vr, ar, cr

from torch.nn import Sequential
class NoRNNBrain(Brain):
    """Brain variant whose cortex is a feed-forward MLP instead of a GRU."""

    def __init__(self, visual_route=None, auditory_route=None, depth=1, node_size=500):
        """
        :param visual_route: image route; a fresh VisualRoute if None
        :param auditory_route: audio route; a fresh AuditoryRoute if None
        :param depth: which from-last route output feeds the cortex
        :param node_size: kept for interface parity with Brain (the MLP
            cortex below does not use it)
        """
        # Fixes vs. the original:
        #  * super().__init__() was called with NO arguments, so the passed-in
        #    routes/depth/node_size were ignored by Brain.__init__ and a full
        #    default Brain (shared default routes + GRU cortex) was built and
        #    then overwritten piecemeal; forward the arguments instead.
        #  * VisualRoute()/AuditoryRoute() defaults were evaluated once and
        #    shared across instances; use None sentinels.
        super().__init__(
            visual_route=visual_route if visual_route is not None else VisualRoute(),
            auditory_route=auditory_route if auditory_route is not None else AuditoryRoute(),
            depth=depth,
            node_size=node_size,
        )
        # Replace the GRU cortex with a plain concat -> flatten -> MLP head.
        # NOTE(review): uses depth directly as the concat/flatten dim, unlike
        # Cortex which caps the dim at 2 -- confirm intent for depth > 2.
        self.cortex = Sequential(
            Concat(depth),
            Flatten(start_dim=depth),
            MyMLP(size_dict[depth], 2 * size_dict[depth], 10),
        )
        # Re-register under the same name so the new cortex shadows Brain's.
        self.add_module('cortex', self.cortex)


if __name__ == '__main__':
    # Smoke test: push random visual/auditory batches through a Brain.
    from torch import randn

    visual_batch = randn([20, 1, 28, 28])
    auditory_batch = randn([20, 4, 64, 64])
    sample = (visual_batch, auditory_batch)
    brain = Brain(depth=2)
    r = brain(sample)
    r2 = brain.analyse(sample)
    print('test')

if __name__ == '__main__':
    # Smoke test: run each route on one sampled dataset batch.
    from Mydatasets.ViAuDataset.datasetclass import ViAuDataset

    viau_data = ViAuDataset(path=r'../a1_VAData/ViAuDataset.dataset')
    vdin, adin, label, dictionary = viau_data.sample(gpu=False)

    visual_net = VisualRoute()
    auditory_net = AuditoryRoute()

    visual_out = visual_net(vdin)
    auditory_out = auditory_net(adin)

    print('test')

if __name__ == '__main__':
    # Smoke test: load a trained VisualRoute checkpoint from disk.
    from Mydatasets.ViAuDataset.datasetclass import ViAuDataset

    dataset = ViAuDataset(path=r'../a1_VAData/ViAuDataset.dataset')
    vdin, adin, label, dictionary = dataset.sample(gpu=False)

    # NOTE(review): 'visaul_net.model' looks like a typo for
    # 'visual_net.model', but the checkpoint on disk may genuinely carry
    # this name -- verify before renaming.
    visual_net = VisualRoute().load(r'../a3_Train/visaul_net.model')
    print('test')
