package com.auxiliary;

import java.util.ArrayList;
import java.util.List;

import org.encog.engine.network.activation.ActivationBiPolar;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.engine.network.activation.ActivationSoftMax;
import org.encog.engine.network.activation.ActivationStep;
import org.encog.ml.CalculateScore;
import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLData;
import org.encog.ml.data.temporal.TemporalDataDescription;
import org.encog.ml.data.temporal.TemporalMLDataSet;
import org.encog.ml.data.temporal.TemporalPoint;
import org.encog.ml.train.MLTrain;
import org.encog.ml.train.strategy.Greedy;
import org.encog.ml.train.strategy.HybridStrategy;
import org.encog.ml.train.strategy.StopTrainingStrategy;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.Train;
import org.encog.neural.networks.training.TrainingSetScore;
import org.encog.neural.networks.training.anneal.NeuralSimulatedAnnealing;
import org.encog.neural.networks.training.propagation.back.Backpropagation;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
import org.encog.neural.pattern.ElmanPattern;
import org.encog.util.arrayutil.NormalizeArray;

/**
 * Time-series forecaster backed by an Encog Elman-style recurrent network.
 *
 * <p>The raw series is normalized on construction; {@link #predict(int)} /
 * {@link #predictOne(int)} append {@code count} recursively generated future
 * points, while {@link #compare(int)} re-predicts the last {@code count}
 * observed points in place so predictions can be compared against actuals.
 * All returned values are in the normalized range (denormalization was
 * intentionally disabled in the original code).
 */
public class NeuralNetwork {
	
	/** Raw input series as supplied by the caller. */
	private List<Double> dataList;
	/** dataList rescaled into the range chosen by the constructor. */
	public List<Double> normalizeList;
	/** Working copy of the normalized series plus generated forecasts. */
	private List<Double> predictList;
	private BasicNetwork network;
	/** Number of past points fed to the network per prediction (input window). */
	private int lagSize;
	/** Number of future points paired with each window in the training set. */
	private int leadSize = 1;
	
	/**
	 * Builds a forecaster with a 12-point input window, normalizing the
	 * series into [0, 1].
	 *
	 * @param dataList raw time series; should contain more than 12 points
	 */
	public NeuralNetwork(List<Double> dataList) {
		this.dataList = dataList;
		normalize(0, 1);
		this.lagSize = 12;
		this.leadSize = 1;
	}
	
	/**
	 * Builds a forecaster with a caller-chosen window, normalizing the
	 * series into [0, 0.5].
	 *
	 * @param dataList raw time series; should contain more than lagSize points
	 * @param lagSize  input window length
	 * @param leadSize forecast horizon per training pair
	 */
	public NeuralNetwork(List<Double> dataList, int lagSize, int leadSize) {
		this.dataList = dataList;
		// NOTE(review): the original passed -0 here, which is just 0.0 as a
		// double; possibly -0.5 was intended — confirm before changing.
		normalize(0, 0.5);
		this.lagSize = lagSize;
		this.leadSize = leadSize;
	}
	
	/**
	 * Rescales {@link #dataList} into [lo, hi] and stores the result in
	 * {@link #normalizeList}.
	 */
	private void normalize(double lo, double hi) {
		NormalizeArray norm = new NormalizeArray();
		norm.setNormalizedHigh(hi);
		norm.setNormalizedLow(lo);
		
		double[] raw = new double[dataList.size()];
		for (int i = 0; i < raw.length; i++) {
			raw[i] = dataList.get(i);
		}
		
		double[] scaled = norm.process(raw);
		
		normalizeList = new ArrayList<Double>(scaled.length);
		for (double v : scaled) {
			normalizeList.add(v);
		}
	}
	
	/**
	 * Builds a temporal training set from the normalized series: each sample
	 * pairs a window of {@code lagSize} past values with {@code leadSize}
	 * future values.
	 *
	 * @return the generated training set
	 */
	public MLDataSet generateTraining()
	{
		TemporalMLDataSet result = new TemporalMLDataSet(lagSize, leadSize);
		
		// RAW type, used both as input (lag) and prediction target (lead).
		TemporalDataDescription desc = new TemporalDataDescription(
				TemporalDataDescription.Type.RAW, true, true);
		result.addDescription(desc);
		
		for (int i = 0; i < this.normalizeList.size(); i++)
		{
			TemporalPoint point = new TemporalPoint(1);
			point.setSequence(i);
			point.setData(0, this.normalizeList.get(i));
			result.getPoints().add(point);
		}
		
		result.generate();
		
		return result;
	}
	
	/**
	 * Creates the network: lagSize inputs, one 10-neuron softmax hidden layer
	 * feeding context back to the input layer (Elman-style recurrence), and a
	 * single linear output neuron.
	 */
	private void createNetwork()
	{
		network = new BasicNetwork();
		
		BasicLayer input = new BasicLayer(lagSize);
		BasicLayer hidden = new BasicLayer(new ActivationSoftMax(), true, 10);
		
		network.addLayer(input);
		network.addLayer(hidden);
		network.addLayer(new BasicLayer(1));
		
		// Recurrent context: the hidden layer's previous output is fed back
		// into the input layer on the next computation.
		input.setContextFedBy(hidden);
		
		network.getStructure().finalizeStructure();
		network.reset();
	}
	
	/**
	 * Trains the network with plain backpropagation (learning rate 0.2,
	 * momentum 0.01) until the stop strategy fires or 100000 epochs elapse.
	 *
	 * @param training the training set to fit
	 */
	private void train(MLDataSet training)
	{
		final StopTrainingStrategy stop = new StopTrainingStrategy();
		
		final Train train = new Backpropagation(network, training, 0.2, 0.01);
		train.addStrategy(stop);
		
		int epoch = 1;
		while (!stop.shouldStop() && epoch < 100000) {
			train.iteration();
			System.out.println("Epoch #" + epoch + " Error:" + train.getError());
			epoch++;
		}
	}
	
	/** Builds a fresh network and trains it on the current normalized series. */
	private void trainNetwork()
	{
		createNetwork();
		train(generateTraining());
	}
	
	/**
	 * Recursively appends {@code count} forecast points to a copy of the
	 * normalized series: each new point is predicted from the preceding
	 * {@code lagSize} values (which include earlier forecasts).
	 *
	 * @param count number of future points to generate
	 * @return the normalized series followed by the generated forecasts
	 * @throws IllegalStateException if the series is shorter than lagSize
	 */
	private List<Double> forecast(int count)
	{
		trainNetwork();
		
		predictList = new ArrayList<Double>(normalizeList);
		if (predictList.size() < lagSize) {
			throw new IllegalStateException(
					"series length " + predictList.size()
					+ " is smaller than lag size " + lagSize);
		}
		
		for (int i = 0; i < count; i++)
		{
			MLData input = new BasicMLData(lagSize);
			for (int j = 0; j < lagSize; j++)
			{
				// Window ends at the current tail, so later iterations
				// consume previously generated forecasts.
				input.setData(j, predictList.get(predictList.size() - lagSize + j));
			}
			MLData output = network.compute(input);
			predictList.add(output.getData(0));
		}
		
		return predictList;
	}
	
	/**
	 * Trains the network and appends {@code count} forecast points.
	 *
	 * @param count number of future points to generate
	 * @return the normalized series followed by the forecasts
	 */
	public List<Double> predict(int count)
	{
		return forecast(count);
	}
	
	/**
	 * Identical to {@link #predict(int)}; kept for interface compatibility
	 * (the original contained a verbatim copy of predict's body).
	 *
	 * @param count number of future points to generate
	 * @return the normalized series followed by the forecasts
	 */
	public List<Double> predictOne(int count)
	{
		return forecast(count);
	}
	
	/**
	 * Re-predicts the last {@code count} points of the normalized series in
	 * place, so callers can compare predictions against the held-out actuals.
	 * Note that once a point is overwritten, later windows read the predicted
	 * value rather than the original observation.
	 *
	 * @param count number of trailing points to re-predict
	 * @return the normalized series with its last count entries replaced
	 * @throws IllegalStateException if count leaves fewer than lagSize
	 *         points of history before the first re-predicted index
	 */
	public List<Double> compare(int count)
	{
		trainNetwork();
		
		predictList = new ArrayList<Double>(normalizeList);
		int start = predictList.size() - count;
		if (start < lagSize) {
			throw new IllegalStateException(
					"need at least " + lagSize + " points before index "
					+ start + " to form a prediction window");
		}
		
		for (int i = start; i < predictList.size(); i++)
		{
			MLData input = new BasicMLData(lagSize);
			for (int j = 0; j < lagSize; j++)
			{
				input.setData(j, predictList.get(i - lagSize + j));
			}
			MLData output = network.compute(input);
			predictList.set(i, output.getData(0));
		}
		
		return predictList;
	}
}
