import os
from collections import OrderedDict

import torch
from . import resnet101, resnet34
from .scheduler import init_scheduler
from .optimizer import init_opitm

class Model():
    '''
    Wrapper bundling a classifier network with its optimizer, LR scheduler
    and loss function, plus checkpoint save/load helpers.

    opt:
    {
        'model' : 'resnet101',
        'classes' : number of output classes,
        'torch_pretrain' : whether to use torchvision pretrained weights,
        'save_dir' : directory where checkpoints are written,
    }
    '''
    def __init__(self, opt):

        # Supported architecture names -> factory modules.
        model_list = {
            'resnet'    : resnet101,
            'resnet101' : resnet101,
            'resnet34'  : resnet34,
        }

        if opt.model not in model_list:
            raise AttributeError('wrong name of model: %s' % opt.model)

        self.model = opt.model
        self.classes = opt.classes
        self.save_dir = opt.save_dir
        # makedirs creates intermediate directories and exist_ok avoids the
        # race between an exists() check and creation (os.mkdir did neither).
        os.makedirs(self.save_dir, exist_ok=True)

        self.lr = opt.lr
        # NOTE(review): attribute name is a typo for "device" but is kept
        # unchanged for backward compatibility with any external readers.
        self.opt_divce = opt.device
        self.resume = opt.resume  # resume an interrupted training run

        # Exponential moving average of the training loss (decay 0.95).
        self.avg_loss = 0

        self.classifier = model_list[opt.model].classifier(opt)
        self.classifier = self.classifier.to(self.opt_divce)
        self.optimizer = init_opitm(self.classifier.parameters(), opt)
        self.scheduler = init_scheduler(opt, self.optimizer)
        self.loss_fn = torch.nn.CrossEntropyLoss()

    def forward(self, input):
        """Run the classifier on *input* and return its raw output."""
        output = self.classifier(input)
        return output

    def update(self, output, label):
        """Compute the loss, backpropagate, and take one optimizer step.

        Also folds the new loss into the running EMA ``self.avg_loss``.
        Returns the loss tensor.
        """
        loss = self.loss_fn(output, label)
        self.avg_loss = 0.95 * self.avg_loss + loss.item() * 0.05

        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()

        return loss

    def lr_update(self):
        """Advance the learning-rate scheduler, if one is configured."""
        if self.scheduler is not None:
            self.scheduler.step()


    def load(self, ckpt_path):
        """Load a checkpoint from *ckpt_path*.

        Always restores the classifier weights. When ``self.resume`` is
        truthy, also restores the optimizer (and scheduler, if both the
        checkpoint and this run have one) state.

        Returns the epoch number stored in the checkpoint.
        Raises FileNotFoundError if *ckpt_path* does not exist.
        """
        if not os.path.exists(ckpt_path):
            # FileNotFoundError is more precise than bare Exception and is
            # still caught by any existing `except Exception` handlers.
            raise FileNotFoundError('wrong path {} of checkpoint'.format(ckpt_path))
        load_dict = torch.load(ckpt_path, map_location=self.opt_divce)
        self.classifier.load_state_dict(load_dict['classifier'])
        epoch = load_dict['epoch']

        if self.resume:
            self.optimizer.load_state_dict(load_dict['optimizer'])
            # save() only stores 'scheduler' when one exists, and the current
            # run may also have scheduler=None — guard both sides.
            if 'scheduler' in load_dict and self.scheduler is not None:
                self.scheduler.load_state_dict(load_dict['scheduler'])
                self.scheduler.step()

            print('Load checkpoint from %s, resume training.' % ckpt_path)

        else:
            print('Load checkpoint from %s.' % ckpt_path)

        return epoch

    def save(self, epoch):
        """Write checkpoint '<epoch>_<model>.pth' into ``self.save_dir``.

        Stores classifier, optimizer, (optional) scheduler state and epoch.
        """
        save_filename = '%d_%s.pth' % (epoch, self.model)
        save_path = os.path.join(self.save_dir, save_filename)
        save_dict = OrderedDict()
        save_dict['classifier'] = self.classifier.state_dict()
        save_dict['optimizer'] = self.optimizer.state_dict()
        if self.scheduler is not None:
            save_dict['scheduler'] = self.scheduler.state_dict()
        save_dict['epoch'] = epoch
        torch.save(save_dict, save_path)
        print('Save checkpoint at %s' % save_path)
