# -*- coding: utf-8 -*-
"""
@file name      : model_load.py
@author         : QuZhang
@date           : 2021-1-2 10:23
@brief          : 模型的加载
"""
import torch.nn as nn
from collections import OrderedDict
import torch


class LeNet2(nn.Module):
    """LeNet-style CNN for 3-channel 32x32 inputs.

    The sub-modules are named via an ``OrderedDict`` so individual layers
    can be addressed by name in a saved ``state_dict``.
    """

    def __init__(self, classes):
        super().__init__()

        # Feature extractor: two conv -> ReLU -> max-pool stages.
        self.features = nn.Sequential(OrderedDict([
            ("conv1", nn.Conv2d(3, 6, 5)),
            ("relu1", nn.ReLU()),
            ("maxpool1", nn.MaxPool2d((2, 2), 2)),
            ("conv2", nn.Conv2d(6, 16, 5)),
            ("relu2", nn.ReLU()),
            ("maxpool2", nn.MaxPool2d(2, 2)),
        ]))

        # Classifier head: three fully connected layers.
        self.classifier = nn.Sequential(OrderedDict([
            ("linear1", nn.Linear(16 * 5 * 5, 120)),
            ("relu3", nn.ReLU()),
            ("linear2", nn.Linear(120, 84)),
            ("relu4", nn.ReLU()),
            ("linear3", nn.Linear(84, classes)),
        ]))

    def forward(self, x):
        """Run the network; returns logits of shape (batch, classes)."""
        feats = self.features(x)
        # Flatten each sample's feature maps into a single row vector.
        flat = feats.view(feats.size(0), -1)
        return self.classifier(flat)

    def initialize(self):
        """Fill every parameter with a sentinel constant (demo helper).

        Makes it obvious in the demo below when loaded weights have
        actually overwritten the in-memory parameters.
        """
        for param in self.parameters():
            param.data.fill_(20210102)


if __name__ == "__main__":
    # --------- load net ----------
    # Demo 1: load an entire pickled model object (architecture + weights).
    # flag = True
    flag = False
    if flag:
        path_model = "./model.pkl"
        # map_location="cpu" lets a checkpoint saved on GPU load on a
        # CPU-only machine instead of raising a deserialization error.
        net_load = torch.load(path_model, map_location="cpu")
        print(net_load)

    # -------- load state_dict -----------
    # Demo 2: load only the parameter dictionary from a previous run.
    # Pre-bind to None so Demo 3 below stays safe even when this flag
    # is toggled off (previously that caused a NameError at load time).
    state_dict_load = None
    flag = True
    if flag:
        path_state_dict = "./model_state_dict.pkl"
        state_dict_load = torch.load(path_state_dict, map_location="cpu")
        print(state_dict_load.keys())

    # ------------ update state_dict -----------
    # Demo 3: copy the loaded parameters into a freshly built network,
    # i.e. resume from the previously saved training result.
    flag = True
    if flag and state_dict_load is not None:
        net_new = LeNet2(classes=2021)
        print("加载前: ", net_new.features[0].weight[0, ...])
        # Overwrite the new network's random init with the saved weights.
        net_new.load_state_dict(state_dict_load)
        print("加载后：", net_new.features[0].weight[0, ...])