/**
 * May 19, 2017
 */
package cn.edu.bjtu;

import java.io.File;
import java.io.IOException;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.SerializationUtils;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.eval.Evaluation;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.layers.GravesLSTM;
import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.deeplearning4j.ui.api.Utils;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import cn.edu.bjtu.datasource.dsiter.TransformedVecDataSetIterator;
import cn.edu.bjtu.datasource.fileiter.TransformedVecWithIdLineVectorRecordReader;

/**
 * Development-only training entry point.
 *
 * @author Alex
 * @deprecated use the unified entry point {@code cn.edu.bjtu.App} instead
 */
@Deprecated
public class AppDev {
	private static final Logger log = LoggerFactory.getLogger(AppDev.class);

	/**
	 * Trains a GravesLSTM (100 -&gt; 200, softsign) network topped with a
	 * 15-class softmax {@code RnnOutputLayer} on transformed document
	 * vectors, evaluating against a held-out test set after every epoch.
	 *
	 * @param args optional path overrides: {@code args[0]} = training data
	 *             directory, {@code args[1]} = test data directory; the
	 *             original hard-coded development paths are used as defaults
	 * @throws IOException          if a data file cannot be read
	 * @throws InterruptedException if record-reader initialization is interrupted
	 */
	public static void main(String[] args) throws IOException, InterruptedException {
		// Hard-coded development paths, overridable from the command line.
		String trainPath = args.length > 0 ? args[0] : "D:\\textdata\\transformdoc\\20170517092837";
		String testPath = args.length > 1 ? args[1] : "D:\\textdata\\transformdoc\\test";

		TransformedVecWithIdLineVectorRecordReader trainReader = new TransformedVecWithIdLineVectorRecordReader(0);
		trainReader.initialize(new FileSplit(new File(trainPath)));
		TransformedVecDataSetIterator trainIter = new TransformedVecDataSetIterator(trainReader, 3);

		TransformedVecWithIdLineVectorRecordReader testReader = new TransformedVecWithIdLineVectorRecordReader(0);
		testReader.initialize(new FileSplit(new File(testPath)));
		TransformedVecDataSetIterator testIter = new TransformedVecDataSetIterator(testReader, 3);

		MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
				.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
				.updater(Updater.RMSPROP)
				.regularization(true).l2(1e-5)
				.weightInit(WeightInit.XAVIER)
				.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
				.gradientNormalizationThreshold(1.0)
				.learningRate(0.0018)
				.list()
				.layer(0, new GravesLSTM.Builder().nIn(100).nOut(200)
						.activation("softsign").build())
				.layer(1, new RnnOutputLayer.Builder().activation("softmax")
						.lossFunction(LossFunctions.LossFunction.MCXENT).nIn(200).nOut(15).build())
				.pretrain(false).backprop(true).build();

		MultiLayerNetwork net = new MultiLayerNetwork(conf);
		net.init();
		net.setListeners(new ScoreIterationListener(20));

		// Fixed 100-dimensional probe vector used as a per-epoch sanity check.
		// It is loop-invariant, so build it once rather than on every epoch.
		INDArray probe = Nd4j.create(new double[]{42.61209459928796,27.143187310546637,28.997552276065107,-67.68779727304354,-20.187438754248433,-61.747798546915874,-4.064005217776867,-20.102730276295915,17.579604211612605,-26.27668349747546,-47.79171816993039,21.14118924131617,43.95506577461492,-2.5370413517812267,-25.87242092163069,-69.46483485866338,1.9426657404983416,-64.03701794036897,-1.0661576407728717,78.3554993125872,-8.594321549870074,60.40959702926921,-68.47115112980828,-19.686907139141113,-6.48994526930619,39.47328520321753,-33.94165093428455,60.58792022211128,21.168747397954576,-4.7501607512822375,-6.187810566625558,36.12628199288156,4.330020111985505,4.480058914923575,-20.67308028775733,24.257413697894663,56.323896827409044,-14.432461309756036,-20.38332181435544,-8.750354198389687,-60.47441629471723,-31.271388079971075,12.818110849068034,39.857018274022266,-12.831855010066647,-53.69736172549892,37.82957982085645,24.68306499737082,-1.4131889138370752,-83.03472479854827,-38.9246563447814,-81.49741004011594,-39.557538965542335,2.8568362353835255,10.727721859442681,-22.09539270971436,29.273943159147166,32.10011333762668,18.891814821050502,-21.893235003342852,-0.8829155944986269,-17.25549416441936,20.982448135036975,-11.952242165294592,-48.63951942289714,-81.17542073765071,29.98830817718408,-49.056222975908895,76.53962860495085,-18.589674457209185,-1.740762241417542,-33.3220454549155,35.27951927296817,-42.82744026952423,-3.7849389502953272,-12.059556960171903,46.09798927058,-29.564315927913412,-10.854022215120494,14.826446813007351,-12.010269213235006,17.683802902931347,5.502959173405543,-46.45660081144888,-22.44315067442949,12.719516613491578,-2.950754416640848,29.918145325849764,52.44857004068763,14.642887657508254,53.57380835560616,61.73820570698058,-14.504653498050175,86.08927796257194,61.83140442031436,36.7445044091437,-104.09403585680411,-79.88210681208875,-31.837208732496947,-46.51780287446309});

		int nEpochs = 40;
		for (int epoch = 0; epoch < nEpochs; epoch++) {
			net.fit(trainIter);
			// Print the raw network output for the probe vector.
			System.out.println(net.output(probe));
			// Evaluate on the test set:
			Evaluation evaluation = net.evaluate(testIter);
			System.out.println(evaluation.confusionToString());
			// SLF4J parameterized logging instead of eager String.format.
			log.info("Test set evaluation at epoch {}: Accuracy = {}, F1 = {}",
					epoch,
					String.format("%.2f", evaluation.accuracy()),
					String.format("%.2f", evaluation.f1()));
			trainIter.reset();
			testIter.reset();
		}
	}
}
