from functools import wraps
import torch
from torch.optim import *
from torch import nn
import math
import os

from util.utils import mkdirs

from util.logger import Recorder
# Recorder output configuration, keyed by record type.
# NOTE(review): "accurancy" is a typo for "accuracy", but it is a runtime
# string (the CSV header), so it is left untouched to keep existing output
# files and any downstream parsers compatible.
recorder_config = {
    "test": {
        "header": "node, class, accurancy",
        "file": "output/recorder/bnntree_test.csv",
    }
}

@Recorder(header=recorder_config["test"]["header"], recorder_file=recorder_config["test"]["file"], mode="w")
def recorde_test(*args):
    """
    Append one test-result row (node name, class, accuracy) to the CSV
    configured in ``recorder_config["test"]``.

    The body is intentionally a no-op: the ``Recorder`` decorator is
    presumably responsible for writing ``*args`` to the configured file —
    confirm against util.logger.Recorder.
    """
    pass

tree_model_file = ["model", "bnnt"]

    
class BNNTree:
    """Binary decision tree of BNN nodes.

    Each internal node learns a binary split of its class range [lb, rb]
    at ``mid``; classification descends the tree until a leaf is reached.
    """

    def __init__(self, n, in_channels, data_manager, BNNNode, epoch, pretrain=False, device="cpu"):
        """
        :param n: number of classes covered by the tree (range 0..n-1).
        :param in_channels: input channel count forwarded to every node.
        :param data_manager: provides per-class-range dataloaders via get_subdata.
        :param BNNNode: node class, instantiated as BNNNode(val, id, name, in_channels, level).
        :param epoch: per-level epoch counts, indexed by node level.
        :param pretrain: when True, load previously saved node weights after building.
        :param device: torch device string, e.g. "cpu" or "cuda".
        """
        self.classes = n
        self.in_channels = in_channels
        self.data_manager = data_manager
        # Build the model directory without mutating the module-level list:
        # the original `tree_model_file.append(...)` grew the shared list on
        # every instantiation, corrupting the path for any second tree.
        self.tree_model_dir = os.path.join(
            *tree_model_file, data_manager.bnnt_datasets.__name__
        )
        self.device = device
        self.BNNNode = BNNNode
        self.epoch = epoch
        self.pretrain = pretrain
        self.id = -1  # last node id handed out by get_id()

        print(f"Using: {self.device}")
        self.tree = self.build(0, n - 1, 0)

        if self.pretrain:
            self.restore(0, n - 1, self.tree)

        self.optim = SGD  # default optimizer class
        self.lr = 1e-1  # default learning rate
        self.lrs_args = None  # per-level MultiStepLR args; None disables scheduling

    def set_optim_lr(self, optim, lr):
        """Override the optimizer class and learning rate used by train()."""
        self.optim = optim
        self.lr = lr

    def set_lrs(self, lrs_args):
        """Set per-level MultiStepLR argument tuples (indexed by node level)."""
        self.lrs_args = lrs_args

    def get_id(self):
        """Return the next sequential node id (0, 1, 2, ...)."""
        self.id += 1
        return self.id

    def restore(self, lb, rb, root):
        """Recursively load saved state dicts for every internal node
        covering the class range [lb, rb]."""
        # Guard the empty range / missing node before touching root
        # (the original called is_leaf first, which would crash on None).
        if root is None or lb > rb:
            return
        if self.is_leaf(root):
            return
        mid = (lb + rb) // 2
        node_file = os.path.join(self.tree_model_dir, root.node_name + ".pk")
        root.load_state_dict(torch.load(node_file, map_location=self.device))
        if lb != rb:
            self.restore(lb, mid, root.left)
            self.restore(mid + 1, rb, root.right)

    def build(self, lb, rb, level):
        """Recursively build the subtree over class range [lb, rb].

        :returns: the subtree root, or None for an empty range.
        """
        if lb > rb:
            return None
        mid = (lb + rb) // 2
        node_name = f"{lb}({mid}){rb}"
        node = self.BNNNode(mid, self.get_id(), node_name, self.in_channels, level).to(self.device)
        if lb != rb:
            node.set_left(self.build(lb, mid, level + 1))
            node.set_right(self.build(mid + 1, rb, level + 1))
        return node

    def in_traversal(self, root):
        """In-order traversal printing each node's split value."""
        if root is None:
            return
        self.in_traversal(root.left)
        print(root.val)
        self.in_traversal(root.right)

    def is_leaf(self, node):
        """A node with no children is a leaf."""
        # `and` (short-circuit) instead of the original bitwise `&`.
        return node.left is None and node.right is None

    def get_dataloader(self, lb, rb, istrain=True):
        """Dataloader restricted to classes [lb, rb]."""
        return self.data_manager.get_subdata(lb, rb, istrain)

    def train(self, root, lb, rb):
        """Pre-order training: fit root's binary split over classes [lb, rb],
        then recurse into both children."""
        if self.is_leaf(root):
            return
        dataloader = self.get_dataloader(lb, rb)
        loss_fn = nn.CrossEntropyLoss()
        optim = self.optim(root.parameters(), self.lr, weight_decay=1e-4)
        if self.lrs_args is not None:
            # lr_scheduler comes from the `from torch.optim import *` star import.
            self.lrs = lr_scheduler.MultiStepLR(optim, *(self.lrs_args[root.level]))

        for epoch in range(self.epoch[root.level]):
            root.train(dataloader, loss_fn, optim, self.device, epoch)
            acc, loss = self.node_test_acc(root, lb, rb)
            print(f"Test node[{root.node_name}] loss: {loss:>7f} acc: {(acc):>0.2f}%")
            if self.lrs_args is not None:
                self.lrs.step()
                print(f"current learning rate: {self.lrs.get_last_lr()}")
        print(f"Test acc:{(100 * self.test_acc()):>0.2f}%")

        self.train(root.left, lb, root.val)
        self.train(root.right, root.val + 1, rb)

    def save_tree(self, root):
        """Save every internal node's state dict under tree_model_dir."""
        if self.is_leaf(root):
            return
        path = os.path.join(self.tree_model_dir, root.node_name + ".pk")
        mkdirs(path)
        torch.save(root.state_dict(), path)
        self.save_tree(root.left)
        self.save_tree(root.right)

    def pre_order(self, func):
        """
        Decorator: run ``func`` only for internal nodes; the first positional
        argument must be the (sub)tree root. Leaves yield None.
        """
        @wraps(func)
        def wrapper(*args, **kwargs):
            if self.is_leaf(args[0]):
                return None
            return func(*args, **kwargs)
        # Fix: the original never returned the wrapper, so using this as a
        # decorator produced None instead of a callable.
        return wrapper

    def node_test_acc(self, node, lb, rb):
        """Binary split accuracy and mean loss of one node over test classes [lb, rb].

        Each per-class dataloader yields batches that share a single label,
        so the batch's true side of the split is read from y[0].
        """
        size_ = 0
        count = 0
        loss = 0
        for i in range(lb, rb + 1):
            data = self.get_dataloader(i, i, istrain=False)
            size_ += len(data.dataset)
            for x, y in data:
                label = y[0].item()  # whole batch shares one class label
                x = x.to(self.device)
                y = y.to(self.device)
                pred = node(x)
                # Collapse class labels to the node's binary target:
                # 0 = left subtree (<= node.val), 1 = right subtree.
                y[y <= node.val] = 0
                y[y > node.val] = 1
                loss += node.loss_fn(pred, y).item()
                pred = pred.argmax(1)
                if node.val < label:
                    count += len(x[pred > 0])  # correct "go right" predictions
                else:
                    count += len(x[pred < 1])  # correct "go left" predictions
        acc = 100 * count / size_
        loss = loss / size_
        recorde_test(node.node_name, loss, acc)
        return acc, loss

    def test_acc(self):
        """Whole-tree top-1 accuracy over the test set, as a fraction in [0, 1]."""
        size_ = 0
        count = 0
        for i in range(self.classes):
            data = self.get_dataloader(i, i, istrain=False)
            size_ += len(data.dataset)
            for x, y in data:
                x = x.to(self.device)
                y = y.to(self.device)
                count += self.batch_predict_(x, y[0].item())
        return count / size_

    def batch_predict_(self, x, y):
        """Route a same-label batch down the tree and return how many samples
        take the correct branch at every split (i.e. are classified as y)."""
        node = self.tree
        while not self.is_leaf(node):
            if len(x) == 0:
                break
            pred = node(x).argmax(1)
            if node.val < y:  # true class lies in the right subtree
                x = x[pred > 0]
                node = node.right
            else:  # true class lies in the left subtree
                x = x[pred < 1]
                node = node.left
        return len(x)

    def batch_predict(self, x):
        """Predict a class label for every sample in x by descending the tree.

        :returns: 1-D tensor of predicted leaf values, one per sample.
        """
        res = torch.ones(len(x))
        for index, x_ in enumerate(x):
            node = self.tree
            while not self.is_leaf(node):
                pred = node(torch.unsqueeze(x_, dim=0)).argmax(1)
                if pred < 1:
                    node = node.left
                else:
                    node = node.right
            res[index] = node.val
        return res

    def __str__(self):
        """Print an in-order traversal as a side effect; returns ''."""
        self.in_traversal(self.tree)
        return ""

if __name__ == "__main__":
    # No standalone entry point; this module is intended to be imported.
    pass