#!/usr/bin/env python3
#-*- coding:utf8 -*-
# Created on 2020-06-09 23:21:29

import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.nn import Linear
import numpy as np
import os 
import json
import gzip
import random

import sys
curpath=os.path.abspath(os.curdir)
sys.path.append(curpath)
import loadmnistdata
class MNIST(fluid.dygraph.Layer):
    """Minimal MNIST model: one fully connected layer with no activation.

    Each input image is flattened to a 784-element vector (28 x 28 pixels)
    and mapped to a single scalar output, so the label is fitted
    regression-style rather than via a softmax classifier.
    """

    def __init__(self):
        """Create the single Linear layer (784 -> 1, no activation)."""
        super(MNIST, self).__init__()
        self.fc = Linear(input_dim=784, output_dim=1, act=None)

    def forward(self, inputs):
        """Flatten the image batch and return the linear layer's output.

        Args:
            inputs: batch of images; reshaped here to (-1, 784).

        Returns:
            The scalar prediction per sample from the fc layer.
        """
        flattened = fluid.layers.reshape(inputs, (-1, 784))
        return self.fc(flattened)
# Train the model in dygraph (imperative) mode and save its parameters.
with fluid.dygraph.guard():
    model = MNIST()
    model.train()  # put layers into training mode

    train_loader = loadmnistdata.load_data('train')

    # Plain SGD over every trainable parameter of the model.
    optimizer = fluid.optimizer.SGDOptimizer(
        learning_rate=0.001, parameter_list=model.parameters())

    EPOCH_NUM = 10
    for epoch_id in range(EPOCH_NUM):
        for batch_id, (image_data, label_data) in enumerate(train_loader()):
            # Wrap the numpy batches as dygraph variables.
            image = fluid.dygraph.to_variable(image_data)
            label = fluid.dygraph.to_variable(label_data)

            predict = model(image)

            # Mean squared error between prediction and label.
            loss = fluid.layers.square_error_cost(predict, label)
            avg_loss = fluid.layers.mean(loss)

            # Periodic progress report (every 200 batches).
            if batch_id % 200 == 0:
                print("epoch: {}, batch: {}, loss is: {}".format(epoch_id, batch_id, avg_loss.numpy()))

            avg_loss.backward()           # back-propagate
            optimizer.minimize(avg_loss)  # apply the gradient update
            model.clear_gradients()       # reset gradients for the next step

    # Persist the trained parameters (written as mnist.pdparams).
    fluid.save_dygraph(model.state_dict(), 'mnist')


        
