# -*- coding: utf-8 -*-
import torch
from torch import nn
from layers import _Conv2d, _Linear, _BatchNorm2d


class ResBlock(nn.Module):
    """Residual block: two 3x3 convs with BatchNorm, joined by a skip add.

    The skip connection uses ``nn.quantized.FloatFunctional`` so the add can
    be observed/fused during quantization. When ``train`` is truthy, the
    project's quantization-aware ``_Conv2d``/``_BatchNorm2d`` wrappers are
    used in place of the stock ``nn`` modules.

    NOTE(review): the residual add requires ``D_in == D``; callers are
    expected to guarantee matching channel counts.
    """

    def __init__(self, D_in, D, train):
        super().__init__()
        if train:
            conv_cls, bn_cls = _Conv2d, _BatchNorm2d
        else:
            conv_cls, bn_cls = nn.Conv2d, nn.BatchNorm2d
        # 3x3, stride 1, padding 1 — spatial size is preserved.
        self.h1 = conv_cls(D_in, D, 3, 1, 1, bias=False)
        self.bn1 = bn_cls(D)
        self.relu1 = nn.ReLU()
        self.h2 = conv_cls(D, D, 3, 1, 1, bias=False)
        self.bn2 = bn_cls(D)
        self.relu2 = nn.ReLU()
        # Quantization-friendly elementwise add for the skip connection.
        self.skip_add = nn.quantized.FloatFunctional()

    def forward(self, x):
        branch = self.h1(x)
        branch = self.relu1(self.bn1(branch))
        branch = self.bn2(self.h2(branch))
        merged = self.skip_add.add(x, branch)
        return self.relu2(merged)


class ReversiNet(nn.Module):
    """Residual CNN for Reversi producing 65 logits plus a per-square bias.

    The trunk is a stem conv followed by a configurable stack of ResBlocks.
    Head ``out1`` emits 65 values — presumably the 64 board squares plus a
    pass move (TODO confirm against the caller) — and head ``out2`` adds a
    per-square term onto the first 64 of them.

    NOTE(review): ``nn.Flatten()`` into ``Linear(64 * 64, 512)`` hard-codes
    a 64-channel, 8x8 spatial map, i.e. the input is expected to be
    ``(N, 2, 8, 8)``.

    Args:
        cfg: sequence of ``(n_blocks, channels)`` stages for the trunk.
        avg_pool: accepted for interface compatibility but currently unused.
        train: when truthy, substitute the project's quantization-aware
            ``_Conv2d``/``_Linear``/``_BatchNorm2d`` wrappers.
    """

    # Fix: the default was a mutable list ``[(6, 128)]`` — the classic shared
    # mutable-default pitfall. A tuple iterates identically and is immutable.
    def __init__(self, cfg=((6, 128),), avg_pool=True, train=False):
        super().__init__()
        last_c = cfg[0][1]
        Conv2d = _Conv2d if train else nn.Conv2d
        Linear = _Linear if train else nn.Linear
        BatchNorm2d = _BatchNorm2d if train else nn.BatchNorm2d
        # Stem: lift the 2-plane board encoding to the trunk width.
        blocks = [Conv2d(2, last_c, 3, 1, 1), BatchNorm2d(last_c), nn.ReLU()]
        for n, c in cfg:
            for _ in range(n):
                blocks.append(ResBlock(last_c, c, train))
                last_c = c
        self.resblocks = nn.Sequential(*blocks)
        # Head 1: 1x1 conv to 64 channels, then an MLP down to 65 logits.
        self.out1 = nn.Sequential(
            Conv2d(last_c, 64, 1),
            BatchNorm2d(64),
            nn.ReLU(),
            nn.Flatten(),
            Linear(64 * 64, 512),
            nn.ReLU(),
            Linear(512, 1024),
            nn.ReLU(),
            Linear(1024, 65),
        )
        # Head 2: one scalar per board square, added onto the first 64 logits.
        self.out2 = nn.Sequential(Conv2d(last_c, 1, 1, bias=False), nn.Flatten())

    def forward(self, x):
        """Run the trunk and combine both heads into a (N, 65) output."""
        x = self.resblocks(x)
        p = self.out1(x)
        # In-place add of the per-square head onto all but the last logit.
        p[..., :-1] += self.out2(x)
        return p


if __name__ == '__main__':
    # Build a freshly-initialized network, cast parameters to fp16,
    # and dump the state dict to disk.
    net = ReversiNet().half()
    torch.save(net.state_dict(), "w.pt")
