# coding=utf-8
##
## Author: jmdvirus@aliyun.com
##
## Created: Monday, 2019-02-11 12:17:58
##

import logging
import math
import random
import mxnet as mx
import numpy as np

# Raise the root logger to DEBUG so MXNet's per-epoch training metrics
# (emitted via the logging module) are actually printed.
logging.getLogger().setLevel(logging.DEBUG)

# Training hyperparameters.
n_sample = 10000        # number of synthetic training examples
batch_size = 10         # examples per mini-batch
learning_rate = 0.1     # SGD step size
n_epoch = 10            # passes over the training set

# Synthetic regression task: each input is a pair of uniform [0, 1]
# draws; the regression target is the larger of the two values.
train_in = [[random.uniform(0, 1) for _ in range(2)] for _ in range(n_sample)]

# Build the targets with a comprehension instead of pre-allocating a
# zero-filled list and overwriting it index by index.
train_out = [max(a, b) for a, b in train_in]

# Wrap the synthetic data in an MXNet iterator.  The label is keyed by
# 'reg_label', which must agree with the Module's label_names.
data_array = np.array(train_in)
label_array = np.array(train_out)
train_iter = mx.io.NDArrayIter(
    data=data_array,
    label={'reg_label': label_array},
    batch_size=batch_size,
    shuffle=True,
)

# Build a small MLP for scalar regression:
# input (2 features) -> FC(10) + ReLU -> FC(10) + ReLU -> FC(1).
src = mx.sym.Variable('data')
hidden = mx.sym.FullyConnected(data=src, num_hidden=10, name='fc1')
hidden = mx.sym.Activation(data=hidden, act_type='relu', name='act1')
hidden = mx.sym.FullyConnected(data=hidden, num_hidden=10, name='fc2')
hidden = mx.sym.Activation(data=hidden, act_type='relu', name='act2')
output = mx.sym.FullyConnected(data=hidden, num_hidden=1, name='fc3')

# Attach an L2 regression loss.  Naming the output 'reg' makes MXNet
# expect its label under the name 'reg_label'.
net = mx.sym.LinearRegressionOutput(data=output, name='reg')

# Bind the symbol into a trainable Module.  label_names must match the
# 'reg_label' key that the data iterator supplies.
module = mx.mod.Module(symbol=net, label_names=['reg_label'])

# Train with plain SGD, reporting mean-squared error after each epoch.
sgd_params = {'learning_rate': learning_rate}
module.fit(
    train_iter,
    eval_data=None,
    eval_metric=mx.metric.create('mse'),
    initializer=mx.initializer.Uniform(0.5),
    optimizer='sgd',
    optimizer_params=sgd_params,
    num_epoch=n_epoch,
    batch_end_callback=None,
    epoch_end_callback=None,
)

# for k in module.get_params():
#    print(k)

