#!/usr/bin/env python3
#-*- coding:utf8 -*-
# Created: 2020-06-13 15:16:14

import os
import paddle 
import paddle.fluid as fluid
from paddle.fluid.dygraph.nn import Conv2D,Pool2D,Linear
import json
import gzip
import random
import numpy as np
from PIL import Image
import sys
curpath=os.path.abspath(os.curdir)
sys.path.append(curpath)
import dataloader
import model



def train(epoch_num=5, learning_rate=0.01, save_path='mnist-model'):
    """Train the MNIST model in PaddlePaddle dygraph mode and save its weights.

    Args:
        epoch_num: number of full passes over the training set (default 5,
            matching the original script).
        learning_rate: step size for the SGD optimizer (default 0.01).
        save_path: filename prefix passed to ``fluid.save_dygraph`` for the
            trained state dict (default ``'mnist-model'``).
    """
    with fluid.dygraph.guard():
        md = model.MNIST()
        md.train()  # switch the model to training mode
        train_data = dataloader.load_data('train')
        print("优化算法参数调节")
        # Plain SGD; alternative optimizers to compare (same signature):
        #   fluid.optimizer.MomentumOptimizer(learning_rate=..., momentum=0.9, parameter_list=md.parameters())
        #   fluid.optimizer.AdagradOptimizer(learning_rate=..., parameter_list=md.parameters())
        #   fluid.optimizer.AdamOptimizer(learning_rate=..., parameter_list=md.parameters())
        optimizer = fluid.optimizer.SGDOptimizer(
            learning_rate=learning_rate, parameter_list=md.parameters())
        for epoch_id in range(epoch_num):
            for batch_id, data in enumerate(train_data()):
                image_data, label_data = data
                image = fluid.dygraph.to_variable(image_data)
                label = fluid.dygraph.to_variable(label_data)

                fw = md(image)  # forward pass
                loss = fluid.layers.cross_entropy(fw, label)
                avg_loss = fluid.layers.mean(loss)
                # Log progress every 100 batches.
                if batch_id % 100 == 0:
                    print("epoch:{},batch_id:{},loss:{}".format(epoch_id, batch_id, avg_loss.numpy()))
                avg_loss.backward()
                optimizer.minimize(avg_loss)
                md.clear_gradients()  # reset gradients before the next batch
        fluid.save_dygraph(md.state_dict(), save_path)


if __name__ == "__main__":
    train()

