import find_mxnet
import mxnet as mx
import logging
import argparse
import os
import train_model
import json
import urllib2
import socket

def parse_args():
    """Parse and return the command-line options for a training run."""
    cli = argparse.ArgumentParser(description='train an classifer on img')
    add = cli.add_argument
    add('--network', type=str, default='mlp',
        choices=['mlp', 'lenet'],
        help='the cnn to use')
    # previously also used: /home/spark/train_data/img_10min_full_gray_data
    add('--data-dir', type=str, default='/home/spark/train_data/img_10_data0',
        help='the input data directory')
    add('--gpus', type=str,
        help='the gpus will be used, e.g "0,1,2,3"')
    # training split is ~70% of the full example count
    add('--num-examples', type=int, default=27975,
        help='the number of training examples')
    add('--batch-size', type=int, default=64,
        help='the batch size')
    add('--lr', type=float, default=0.0001,
        help='the initial learning rate')
    add('--model-prefix', type=str,
        help='the prefix of the model to load/save')
    add('--save-model-prefix', type=str,
        help='the prefix of the model to save')
    add('--num-epochs', type=int, default=50,
        help='the number of training epochs')
    add('--load-epoch', type=int,
        help="load the model on an epoch using the model-prefix")
    add('--kv-store', type=str, default='local',
        help='the kvstore type')
    add('--lr-factor', type=float, default=1,
        help='times the lr with a factor for every lr-factor-epoch epoch')
    add('--lr-factor-epoch', type=float, default=1,
        help='the number of epoch to factor the lr, could be .5')
    return cli.parse_args()

def get_mlp():
    """
    Build the multi-layer perceptron symbol: 128 -> 64 -> 7 output classes.
    """
    net = mx.symbol.Variable('data')
    net = mx.symbol.FullyConnected(data=net, name='fc1', num_hidden=128)
    net = mx.symbol.Activation(data=net, name='relu1', act_type="relu")
    net = mx.symbol.FullyConnected(data=net, name='fc2', num_hidden=64)
    net = mx.symbol.Activation(data=net, name='relu2', act_type="relu")
    net = mx.symbol.FullyConnected(data=net, name='fc3', num_hidden=7)
    return mx.symbol.SoftmaxOutput(data=net, name='softmax')

def get_lenet():
    """
    Build a LeNet-style CNN symbol: two conv/tanh/max-pool stages followed
    by two fully-connected layers and a 7-way softmax output.
    """
    net = mx.symbol.Variable('data')
    # stage 1: conv -> tanh -> max-pool
    net = mx.symbol.Convolution(data=net, kernel=(5, 5), num_filter=20)
    net = mx.symbol.Activation(data=net, act_type="tanh")
    net = mx.symbol.Pooling(data=net, pool_type="max",
                            kernel=(2, 2), stride=(2, 2))
    # stage 2: conv -> tanh -> max-pool
    net = mx.symbol.Convolution(data=net, kernel=(5, 5), num_filter=50)
    net = mx.symbol.Activation(data=net, act_type="tanh")
    net = mx.symbol.Pooling(data=net, pool_type="max",
                            kernel=(2, 2), stride=(2, 2))
    # classifier head: flatten -> fc(500) -> tanh -> fc(7) -> softmax
    net = mx.symbol.Flatten(data=net)
    net = mx.symbol.FullyConnected(data=net, num_hidden=500)
    net = mx.symbol.Activation(data=net, act_type="tanh")
    net = mx.symbol.FullyConnected(data=net, num_hidden=7)
    return mx.symbol.SoftmaxOutput(data=net, name='softmax')

def calculate(nums, rank):
    """Fetch per-worker ratio data from the local monitor service.

    Falls back to a uniform ratio ([1] * nums) when the monitor is
    unreachable, times out, or returns an empty body.

    Args:
        nums: number of workers (length of the uniform fallback list).
        rank: this worker's rank (currently unused; kept for callers).

    Returns:
        The raw response body on success, or [1] * nums on failure.
        NOTE(review): the success path returns a string, not a parsed
        list like the fallback -- confirm what callers expect (the only
        visible call site is commented out).
    """
    url = "http://127.0.0.1/monitor"

    try:
        response = urllib2.urlopen(url, timeout=5)
    except (urllib2.URLError, socket.timeout):
        # Monitor unavailable: every worker gets an equal share.
        return [1] * nums

    # BUG FIX: read the body exactly once. The original called read()
    # twice -- the second read of a consumed HTTP response is always ''
    # -- and then fell off the end, implicitly returning None.
    body = response.read()
    if body == '':
        return [1] * nums
    return body

def get_iterator(data_shape):
    """Build a data-iterator factory suitable for train_model.fit.

    Args:
        data_shape: (channels, height, width) tuple fed to ImageRecordIter.

    Returns:
        A function (args, kv) -> (train_dataiter, test_dataiter).
    """
    def get_iterator_impl(args, kv):
        if kv is None or kv.num_workers == 1:
            # Single-worker case: no rebalancing, use the plain batch size.
            ratio = [1, ]
            batch_sync = args.batch_size
            logging.info(batch_sync)
        else:
            # Fetch per-worker speed ratios and quantize them to small
            # positive integers (every worker gets at least weight 1).
            ratio = getRatio()
            logging.info('###################origin ratio= ' + str(ratio))
            ratio = [max(int(round(10 * i)), 1) for i in ratio]
            # The original reversed the list with a manual half-length swap
            # using 'len(ratio) / 2', which breaks range() under Python 3;
            # reverse() is the exact in-place equivalent.
            ratio.reverse()
            ind = kv.rank
            logging.info('###################now ratio= ' + str(ratio))
            # This worker's proportional share of the global batch.
            # '//' keeps the Python 2 integer-division semantics and is
            # also correct under Python 3.
            batch_sync = ratio[ind] * kv.num_workers * args.batch_size // sum(ratio)
            logging.info('###################batch_sync=%d' % batch_sync)

        # NOTE(review): kv.num_workers / kv.rank are read below even when
        # kv is None -- that path would raise; confirm kv is never None
        # when iterators are actually built.
        train_dataiter = mx.io.ImageRecordIter(
            path_imgrec=args.data_dir + "/data_train.rec",
            mean_img=args.data_dir + "/mean.bin",
            rand_crop=True,
            rand_mirror=True,
            data_shape=data_shape,
            batch_size=batch_sync,
            preprocess_threads=1,
            num_parts=kv.num_workers,
            part_index=kv.rank)

        test_dataiter = mx.io.ImageRecordIter(
            path_imgrec=args.data_dir + "/data_val.rec",
            mean_img=args.data_dir + "/mean.bin",
            rand_crop=False,
            rand_mirror=False,
            data_shape=data_shape,
            batch_size=batch_sync,
            preprocess_threads=1,
            num_parts=kv.num_workers,
            part_index=kv.rank)

        return (train_dataiter, test_dataiter)
    return get_iterator_impl

def getRatio():
    """Return the per-worker speed ratio list.

    Currently hard-coded to a uniform 4-worker split. The remote lookup
    below was made unreachable by the early return; it is kept here,
    commented out, as deliberately-disabled code for reference.
    """
    return [1, 1, 1, 1]
    # Disabled remote lookup (was dead code after the return above):
    # url = "http://10.108.166.249:23457/"
    # req = urllib2.Request(url)
    # res_data = urllib2.urlopen(req)
    # res = res_data.read()
    # ratio = [float(i) for i in res.split(':')]
    # return ratio

def main():
    """Entry point: parse CLI options, select a network, start training."""
    args = parse_args()

    # Both networks consume 80x80 single-channel images.
    data_shape = (1, 80, 80)
    net = get_mlp() if args.network == 'mlp' else get_lenet()

    train_model.fit(args, net, get_iterator(data_shape))


if __name__=="__main__":
    main()
