from ..model import build_detector
from ..dataset import build_dataset, build_preprocess_op
from torch.utils.data.dataloader import DataLoader
from ..log import logger_dict, build_logger
from ..utils import build_optimizer, WarmupScheduler
import torch
import os


def train(cfg):
    """Run the full training loop for a detector model.

    Args:
        cfg: configuration dict with three sections:
            'Model'     -- passed to ``build_detector`` (``mode`` is forced
                           to ``'train'`` here).
            'Dataset'   -- passed to ``build_dataset``.
            'Train_cfg' -- training hyper-parameters: 'data_pipe',
                           'optimizer' (must contain 'decay_epoch'),
                           'epoch', 'batch_size', 'save_dir', 'log'.

    Side effects:
        Saves a ``state_dict`` checkpoint to ``save_dir`` after every epoch
        as ``epoch<N>.pth`` and emits metrics through the configured logger.

    NOTE(review): assumes a CUDA device is available (`.cuda()` calls) and
    that ``annotation[:, 0]`` holds the class label -- confirm with the
    dataset implementation.
    """
    model_cfg = cfg['Model']
    dataset_cfg = cfg['Dataset']
    train_cfg = cfg['Train_cfg']

    # Build the model in training mode and move it to the GPU.
    model_cfg['mode'] = 'train'
    model = build_detector(model_cfg).cuda()
    model.init_weights()
    model.train()

    dataset = build_dataset(dataset_cfg)

    data_pipe_cfgs = train_cfg['data_pipe']
    optimizer_cfg = train_cfg['optimizer']
    epoch = train_cfg['epoch']
    batch_size = train_cfg['batch_size']
    save_dir = train_cfg['save_dir']
    log_cfg = train_cfg['log']

    # Assemble the preprocessing pipeline and attach it to the dataset.
    dataset.set_data_pipe([build_preprocess_op(op_cfg) for op_cfg in data_pipe_cfgs])

    data_loader = DataLoader(dataset=dataset, batch_size=batch_size,
                             shuffle=True, drop_last=True)

    # 'decay_epoch' configures the LR scheduler, not the optimizer itself,
    # so it is removed from the config before building the optimizer.
    decay_epoch = optimizer_cfg.pop('decay_epoch')
    optimizer_cfg['params'] = model.parameters()
    scheduler = WarmupScheduler(decay_epoch, 0.1, 20)
    optimizer = build_optimizer(optimizer_cfg)

    log_interval = int(log_cfg['interval'])
    logger = build_logger(log_cfg['logger'])

    # Create the checkpoint directory once, up front. exist_ok=True avoids
    # the race-prone exists()/makedirs() pattern, and makedirs' default
    # mode is already 0o777 (subject to umask), matching the old behavior.
    os.makedirs(save_dir, exist_ok=True)

    for e in range(epoch):
        for i, data in enumerate(data_loader):
            input_data, annotation = data
            input_data = input_data.cuda()
            # First column of the annotation tensor is the class label.
            cls_label = annotation[:, 0].cuda().long()
            loss, metric_dict = model(input_data, cls_label)

            optimizer.zero_grad()
            loss.backward()
            # Adjust the learning rate before the parameter update so the
            # new rate applies to this step.
            scheduler.schedule(e, optimizer)
            optimizer.step()

            metric_dict['lr'] = round(optimizer.param_groups[0]['lr'], 5)
            metric_dict['step'] = i
            metric_dict['epoch'] = e + 1
            if i % log_interval == 0:
                logger.log(metric_dict)

        # Save a checkpoint at the end of every epoch (1-indexed filename).
        torch.save(model.state_dict(),
                   os.path.join(save_dir, 'epoch{:d}.pth'.format(e + 1)))










