package weka.classifiers.neural;

import java.util.Enumeration;
import java.util.LinkedList;
import java.util.Random;
import java.util.Vector;

import weka.classifiers.AbstractClassifier;
import weka.classifiers.RandomizableClassifier;
import weka.classifiers.neural.backpropagation.BpModel;
import weka.classifiers.neural.common.Utils;
import weka.classifiers.neural.common.learning.LearningKernelFactory;
import weka.classifiers.neural.common.learning.LearningRateKernel;
import weka.classifiers.neural.common.transfer.TransferFunction;
import weka.classifiers.neural.common.transfer.TransferFunctionFactory;
import weka.core.Attribute;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.SelectedTag;

import weka.core.UnsupportedClassTypeException;
import weka.core.Capabilities.Capability;


/**
 * Back-propagation neural-network classifier with a configurable number of
 * hidden layers and per-layer node counts.
 *
 * <p>Only numeric (non-class) attributes are supported; the class attribute
 * may be nominal or numeric. Training shuffles the data each iteration using
 * the seed inherited from {@link RandomizableClassifier}.</p>
 */
public class Backpropagation extends RandomizableClassifier {

	private static final long serialVersionUID = 415671694484586990L;

	/** Number of hidden layers in the network (default 3). */
	protected int numLayers = 3;
	/** Raw comma-separated node counts per hidden layer, e.g. "3,3,2". */
	protected String snumNodes;
	/** Parsed form of {@link #snumNodes}; null until it has been parsed. */
	private int[] numNodes;
	/** Number of passes over the training data. */
	protected int iterations;
	/** Transfer-function id understood by {@link TransferFunctionFactory}. */
	protected int itransferFunction;
	/** Learning-rate-kernel id understood by {@link LearningKernelFactory}. */
	protected int learningRateFunction;
	/** Initial learning rate. */
	protected double learningRate;

	// Offsets into the three parallel arrays PARAMETERS / PARAMETER_NOTES /
	// PARAM_DESCRIPTIONS below.
	public final static int PARAM_NUM_ITERATE           = 0;
	public final static int PARAM_NUM_LAYERS            = 1;
	public final static int PARAM_S_NUM_NODES           = 2;
	public final static int PARAM_TRANSFER_FUNCTION     = 3;
	public final static int PARAM_LEARNINGRATE_FUNCTION = 4;
	public final static int PARAM_LEARNINGRATE          = 5;

	// Cached statistics of the training set, filled in by prepareTrainingDataset().
	protected int numInstances;
	protected int numAttributes;
	protected int numClasses;
	/** Output-layer size: 1 for a numeric class, numClasses() for a nominal one. */
	protected int numOutputLayer;
	protected LearningRateKernel lrateFunction;
	protected TransferFunction transferFunction;
	protected BpModel model;

	/** Shuffling source, seeded from m_Seed in buildClassifier(). */
	private Random rand;

	// Command-line flags; index i corresponds to the PARAM_* constant with value i.
	public final static String[] PARAMETERS = {
		"I", // number of training iterations
		"L", // number of hidden layers
		"N", // comma-separated node counts, one per layer
		"F", // transfer function id
		"M", // learning-rate function id
		"R"  // learning rate
	};

	// Usage synopsis for each flag (user-visible strings, kept verbatim).
	public final static String[] PARAMETER_NOTES = {
		"<iterations(迭代次数)>", // number of iterations
		"<num layers(隐藏层层数)>", // number of hidden layers
		"<total num nodes(每层节点数目,用\',\'分开)>", // comma-separated node counts
		"<transfer function(激励函数)>",
		"<learning rate function(学习率函数)>",
		"<learning rate(学习率)>"
	};

	// Long descriptions for each flag (user-visible strings, kept verbatim).
	public final static String[] PARAM_DESCRIPTIONS = {
		"Number of Training Iterations(训练数据集迭代次数)",
		"The number of layers in the network (神经网络隐藏层层数)",
		"The number of nodes in every layer(每一层节点数目,用\',\'分割,例如3,3,2)",
		"Neuron transfer function " + TransferFunctionFactory.DESCRIPTION,
		"Neuron learning rate function" + LearningKernelFactory.DESCRIPTION,
		"Learning Rate - [0.05 0.75](recommend 0.1)(学习率，一般推荐为0.1)"
	};

	/**
	 * Creates a classifier with a sigmoid transfer function, linear
	 * learning-rate decay, 200 iterations and a learning rate of 0.1.
	 */
	public Backpropagation() {
		itransferFunction    = TransferFunctionFactory.TRANSFER_SIGMOID;
		learningRateFunction = LearningKernelFactory.LEARNING_FUNCTION_LINEAR;
		snumNodes = "";
		iterations = 200;
		learningRate = 0.1;
	}

	/**
	 * Returns a short description of this classifier for display in the
	 * Weka explorer/GUI.
	 *
	 * @return the description string
	 */
	public String globalInfo() {
		return "Back Propagation Learning Rule, variable number of hidden layers (default 3)";
	}

	/**
	 * Enumerates this classifier's command-line options followed by those of
	 * the superclass.
	 *
	 * @return an enumeration of {@link Option} objects
	 */
	@SuppressWarnings("unchecked")
	public Enumeration listOptions() {
		Vector newVector = new Vector(PARAMETERS.length);

		for (int i = 0; i < PARAMETERS.length; i++) {
			String synopsis = "-" + PARAMETERS[i] + " " + PARAMETER_NOTES[i];
			newVector.add(new Option("\t" + PARAM_DESCRIPTIONS[i], PARAMETERS[i], 1, synopsis));
		}

		Enumeration enu = super.listOptions();
		while (enu.hasMoreElements()) {
			newVector.addElement(enu.nextElement());
		}
		return newVector.elements();
	}

	/**
	 * Parses the given option array. Options that are absent keep their
	 * current values.
	 *
	 * <p>Fixes over the previous revision: the learning-rate case was missing
	 * a {@code break} and fell through into the {@code default} branch, so
	 * supplying {@code -R} always threw "Invalid option offset"; a leftover
	 * debug {@code System.out.print} was also removed, and the node-list
	 * parsing now delegates to {@link #setSnumNodes(String)} so the two code
	 * paths cannot drift apart.</p>
	 *
	 * @param options the option array
	 * @throws Exception if an option value is malformed
	 */
	public void setOptions(String[] options) throws Exception {
		super.setOptions(options);

		String[] values = new String[PARAMETERS.length];
		for (int i = 0; i < values.length; i++) {
			values[i] = weka.core.Utils.getOption(PARAMETERS[i].charAt(0), options);
		}

		for (int i = 0; i < values.length; i++) {
			String data = values[i];

			if (data == null || data.length() == 0) {
				continue; // option not supplied: keep the current setting
			}

			switch (i) {
				case PARAM_NUM_ITERATE:
					iterations = Integer.parseInt(data);
					break;
				case PARAM_NUM_LAYERS:
					numLayers = Integer.parseInt(data);
					break;
				case PARAM_S_NUM_NODES:
					// Stores the raw string and parses it into numNodes.
					setSnumNodes(data);
					break;
				case PARAM_TRANSFER_FUNCTION:
					itransferFunction = Integer.parseInt(data);
					break;
				case PARAM_LEARNINGRATE_FUNCTION:
					learningRateFunction = Integer.parseInt(data);
					break;
				case PARAM_LEARNINGRATE:
					learningRate = Double.parseDouble(data);
					break; // previously missing: fell through to the throw below
				default:
					throw new Exception("Invalid option offset: " + i);
			}
		}
	}

	/**
	 * Returns the current option settings as a flag/value array, mirroring
	 * {@link #setOptions(String[])}.
	 *
	 * @return the current options
	 */
	public String[] getOptions() {
		LinkedList<String> list = new LinkedList<String>();

		list.add("-" + PARAMETERS[PARAM_NUM_ITERATE]);
		list.add(Integer.toString(iterations));

		list.add("-" + PARAMETERS[PARAM_NUM_LAYERS]);
		list.add(Integer.toString(numLayers));

		list.add("-" + PARAMETERS[PARAM_S_NUM_NODES]);
		list.add(snumNodes);

		list.add("-" + PARAMETERS[PARAM_TRANSFER_FUNCTION]);
		list.add(Integer.toString(itransferFunction));

		list.add("-" + PARAMETERS[PARAM_LEARNINGRATE_FUNCTION]);
		list.add(Integer.toString(learningRateFunction));

		list.add("-" + PARAMETERS[PARAM_LEARNINGRATE]);
		list.add(Double.toString(learningRate));

		return (String[]) list.toArray(new String[list.size()]);
	}

	/**
	 * Trains the back-propagation model: validates and copies the data,
	 * instantiates the transfer and learning-rate kernels, then performs
	 * {@link #iterations} shuffled passes over the training set.
	 *
	 * <p>NOTE(review): if {@code -N}/{@code setSnumNodes} was never supplied,
	 * {@code numNodes} is still null when handed to {@link BpModel} — confirm
	 * BpModel tolerates that or require the option upstream.</p>
	 *
	 * @param data the training instances
	 * @throws Exception if the data fails validation or capability checks
	 */
	@Override
	public void buildClassifier(Instances data) throws Exception {
		// Validate and copy the data set; also caches numInstances/numAttributes/numClasses.
		Instances ins = prepareTrainingDataset(data);

		// Instantiate the neuron transfer function.
		transferFunction = TransferFunctionFactory.factory(itransferFunction);

		// Instantiate the learning-rate schedule.
		lrateFunction = LearningKernelFactory.factory(this.learningRateFunction, learningRate, iterations);

		// Verify this classifier can handle the data set.
		getCapabilities().testWithFail(ins);

		// Shuffling source, deterministic per seed.
		rand = new Random(this.m_Seed);

		// Size the output layer from the class attribute.
		numOutputLayer = this.numOutputLayerNodes(ins);

		model = new BpModel(this.numAttributes - 1, numOutputLayer, numNodes, this.m_Seed, this.itransferFunction);

		for (int it = 0; it < this.iterations; it++) {
			// Learning rate for this pass (may decay over iterations).
			double currentRate = currentLearningRate(it);

			// Present the samples in a fresh random order each pass.
			ins.randomize(rand);

			Enumeration e = ins.enumerateInstances();
			while (e.hasMoreElements()) {
				Instance instance = (Instance) e.nextElement();
				// One online weight update per sample.
				model.caseTraining(currentRate, inputsInstance(instance), outputsInstance(instance));
			}
		}
	}

	/**
	 * Computes the number of output-layer nodes: one for a numeric class,
	 * otherwise one per class value.
	 *
	 * @param ins the (validated) training instances
	 * @return the output-layer size
	 */
	public int numOutputLayerNodes(Instances ins) {
		if (ins.classAttribute().isNumeric()) {
			return 1;
		}
		return ins.numClasses();
	}

	/**
	 * Returns the learning rate for the given training iteration, as computed
	 * by the configured learning-rate kernel.
	 *
	 * <p>Fix: the previous revision passed the total iteration count
	 * ({@code iterations}) instead of the current iteration, so the learning
	 * rate never decayed.</p>
	 *
	 * @param iteration the zero-based current iteration
	 * @return the learning rate to use on this iteration
	 */
	public double currentLearningRate(int iteration) {
		return lrateFunction.currentLearningRate(iteration);
	}

	/**
	 * Extracts the input feature vector of a training sample.
	 *
	 * <p>NOTE(review): this copies the first {@code numAttributes - 1} values
	 * of {@code toDoubleArray()}, i.e. it assumes the class attribute is the
	 * LAST attribute; a class attribute elsewhere would leak into the inputs —
	 * confirm against the data sets used.</p>
	 *
	 * @param in the training sample
	 * @return the input vector (all attributes except the last)
	 */
	public double[] inputsInstance(Instance in) {
		double[] inputs = new double[this.numAttributes - 1];
		double[] temp = in.toDoubleArray();
		System.arraycopy(temp, 0, inputs, 0, inputs.length);
		return inputs;
	}

	/**
	 * Builds the expected (target) output vector for a training sample.
	 * For a numeric class the single output is the class value; for a nominal
	 * class the target is a one-hot vector using the transfer function's
	 * maximum for the true class and its minimum elsewhere.
	 *
	 * @param instance the training sample
	 * @return the expected output vector of length {@link #numOutputLayer}
	 */
	public double[] outputsInstance(Instance instance) {
		double[] expected = new double[numOutputLayer];

		if (instance.classAttribute().isNumeric()) {
			expected[0] = instance.classValue();
		} else {
			int classValue = (int) instance.classValue();
			for (int i = 0; i < expected.length; i++) {
				expected[i] = (i == classValue)
					? transferFunction.getMaximum()
					: transferFunction.getMinimum();
			}
		}
		return expected;
	}

	/**
	 * Classifies an instance, returning the raw network output for a numeric
	 * class or a probability distribution for a nominal class.
	 *
	 * <p>NOTE(review): for nominal classes the output is first passed through
	 * the project's {@code Utils.normalise} and then re-scaled to sum to one —
	 * presumably {@code normalise} only shifts into a non-negative range;
	 * confirm against its implementation.</p>
	 *
	 * @param instance the instance to classify
	 * @return the network output / class distribution
	 * @throws Exception if classification fails
	 */
	public double[] distributionForInstance(Instance instance) throws Exception {
		double[] inputs = this.inputsInstance(instance);
		double[] output = model.classifyCase(inputs);

		if (instance.classAttribute().isNominal()) {
			// Bring the raw outputs into a comparable range.
			Utils.normalise(output);

			// Re-scale so the distribution sums to one.
			double sum = 0.0;
			for (int i = 0; i < output.length; i++) {
				sum += output[i];
			}
			for (int i = 0; i < output.length; i++) {
				output[i] = (output[i] / sum);
			}
		}
		return output;
	}

	/**
	 * Validates and copies the training data before model construction:
	 * requires a class attribute, at least one non-class attribute (all of
	 * which must be numeric), a nominal or numeric class, and at least one
	 * instance after removing those with a missing class. Also caches
	 * {@link #numInstances}, {@link #numClasses} and {@link #numAttributes}.
	 *
	 * @param aInstances the raw training data
	 * @return a validated copy of the training data
	 * @throws Exception if any validation fails
	 */
	protected Instances prepareTrainingDataset(Instances aInstances) throws Exception {
		Instances trainingInstances = new Instances(aInstances);

		// Supervised learning requires a class attribute.
		if (trainingInstances.classIndex() < 0) {
			throw new Exception("这是监督学习，训练数据集没有类属性！");
		}

		// There must be at least one non-class attribute.
		// (Previous revision had a stray unary plus: "<= +1".)
		if (trainingInstances.numAttributes() <= 1) {
			throw new Exception("训练数据集没有包括可比较的属性！");
		}

		// The class attribute must be nominal or numeric.
		if (!trainingInstances.classAttribute().isNominal() && !trainingInstances.classAttribute().isNumeric()) {
			throw new UnsupportedClassTypeException("类属性必须是nominal或者numeric");
		}

		// Every non-class attribute must be numeric.
		for (int i = 0; i < trainingInstances.numAttributes(); i++) {
			if (i != trainingInstances.classIndex() && !trainingInstances.attribute(i).isNumeric()) {
				throw new Exception("只有Numeric类型的属性才能用于训练！");
			}
		}

		// Drop samples without a class value.
		trainingInstances.deleteWithMissingClass();

		// Something must remain to train on.
		if (trainingInstances.numInstances() == 0) {
			throw new Exception("没有可用的训练数据样本!");
		}

		numInstances = trainingInstances.numInstances();
		numClasses = trainingInstances.numClasses();
		numAttributes = trainingInstances.numAttributes();

		return trainingInstances;
	}

	/**
	 * Declares the data this classifier can handle: numeric attributes
	 * (with missing values) and a nominal or numeric class.
	 *
	 * @return the capabilities of this classifier
	 */
	public Capabilities getCapabilities() {
		Capabilities result = super.getCapabilities();
		result.disableAll();

		// Attribute types: numeric only (nominal/binary are rejected by
		// prepareTrainingDataset as well).
		result.enable(Capability.NUMERIC_ATTRIBUTES);
		result.enable(Capability.MISSING_VALUES);

		// Class types.
		result.enable(Capability.NOMINAL_CLASS);
		result.enable(Capability.NUMERIC_CLASS);
		result.enable(Capability.MISSING_CLASS_VALUES);

		return result;
	}

	/** @return the configured number of hidden layers */
	public int getNumLayers() {
		return numLayers;
	}

	/** @param numLayers the number of hidden layers */
	public void setNumLayers(int numLayers) {
		this.numLayers = numLayers;
	}

	/** @return the raw comma-separated per-layer node counts */
	public String getSnumNodes() {
		return snumNodes;
	}

	/**
	 * Sets the per-layer node counts from a comma-separated string
	 * (e.g. "3,3,2") and parses it into {@code numNodes}. An empty string
	 * leaves the parsed array untouched.
	 *
	 * @param snumNodes the comma-separated node counts
	 */
	public void setSnumNodes(String snumNodes) {
		this.snumNodes = snumNodes;
		if (snumNodes.length() > 0) {
			String[] parts = snumNodes.split(",");
			this.numNodes = new int[parts.length];
			for (int j = 0; j < parts.length; j++) {
				numNodes[j] = Integer.parseInt(parts[j]);
			}
		}
	}

	/** @return GUI tip text for the number-of-layers property */
	public String numLayersTipText() {
		return PARAM_DESCRIPTIONS[PARAM_NUM_LAYERS];
	}

	/** @return GUI tip text for the node-counts property */
	public String snumNodesTipText() {
		return PARAM_DESCRIPTIONS[PARAM_S_NUM_NODES];
	}

	/** @return GUI tip text for the transfer-function property */
	public String transferFunctionTipText() {
		return PARAM_DESCRIPTIONS[PARAM_TRANSFER_FUNCTION];
	}

	/** @return GUI tip text for the learning-rate-function property */
	public String learningRateFunctionTipText() {
		return PARAM_DESCRIPTIONS[PARAM_LEARNINGRATE_FUNCTION];
	}

	/** @return GUI tip text for the iterations property */
	public String iterationsTipText() {
		return PARAM_DESCRIPTIONS[PARAM_NUM_ITERATE];
	}

	/** @return GUI tip text for the learning-rate property */
	public String learningRateTipText() {
		return PARAM_DESCRIPTIONS[PARAM_LEARNINGRATE];
	}

	/** @return the number of training iterations */
	public int getIterations() {
		return iterations;
	}

	/** @param iterations the number of training iterations */
	public void setIterations(int iterations) {
		this.iterations = iterations;
	}

	/** @return the initial learning rate */
	public double getLearningRate() {
		return learningRate;
	}

	/** @param learningRate the initial learning rate */
	public void setLearningRate(double learningRate) {
		this.learningRate = learningRate;
	}

	/**
	 * Sets the transfer function from a GUI tag selection; ignored if the tag
	 * group does not match.
	 *
	 * @param l the selected tag
	 */
	public void setTransferFunction(SelectedTag l) {
		if (l.getTags() == TransferFunctionFactory.TAGS_TRANSFER_FUNCTION) {
			itransferFunction = l.getSelectedTag().getID();
		}
	}

	/** @return the current transfer function as a tag selection */
	public SelectedTag getTransferFunction() {
		return new SelectedTag(itransferFunction, TransferFunctionFactory.TAGS_TRANSFER_FUNCTION);
	}

	/**
	 * Sets the learning-rate function from a GUI tag selection; ignored if the
	 * tag group does not match.
	 *
	 * @param l the selected tag
	 */
	public void setLearningRateFunction(SelectedTag l) {
		if (l.getTags() == LearningKernelFactory.TAGS_LEARNING_FUNCTION) {
			learningRateFunction = l.getSelectedTag().getID();
		}
	}

	/** @return the current learning-rate function as a tag selection */
	public SelectedTag getLearningRateFunction() {
		return new SelectedTag(learningRateFunction, LearningKernelFactory.TAGS_LEARNING_FUNCTION);
	}

	/**
	 * Command-line entry point.
	 *
	 * @param args standard Weka classifier options
	 */
	public static void main(String[] args) {
		AbstractClassifier.runClassifier(new Backpropagation(), args);
	}
}


