/**
 * Created: 2017-12-14
 */
package exp.paper;

import java.util.Enumeration;
import java.util.Vector;

import org.slf4j.Logger;

import exp.util.DatasetsUtil;
import timeseriesweka.classifiers.TSF;
import utilities.FoldCreator;
import weka.classifiers.Classifier;
import weka.classifiers.IteratedSingleClassifierEnhancer;
import weka.classifiers.rules.ZeroR;
import weka.core.AdditionalMeasureProducer;
import weka.core.Attribute;
import weka.core.Capabilities;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

/**
 * @author Alex
 *
 */
public class CasForest extends IteratedSingleClassifierEnhancer
		implements OptionHandler, AdditionalMeasureProducer, WeightedInstancesHandler, TechnicalInformationHandler {
	// SLF4J logger for this classifier.
	Logger log = org.slf4j.LoggerFactory.getLogger(CasForest.class);
	static final long serialVersionUID = -2368937577670527151L;
	// Shrinkage rate exposed via the -S option; retained from the
	// AdditiveRegression template but not applied anywhere in the visible
	// training/prediction code of this class.
	protected double m_shrinkage = 1.0;
	// Number of cascade levels actually trained (training may stop early).
	protected int m_NumIterationsPerformed;
	// Fallback model used when the data contains only the class attribute.
	protected ZeroR m_zeroR;
	// False when only the ZeroR fallback could be built.
	protected boolean m_SuitableData = true;
	//double epision = -1;
	// Per-level header datasets used to re-map instances at prediction time.
	// NOTE(review): entries past the last trained level may be null.
	Instances[] iterInstances;
	// Minimum change in hold-out error required to keep adding levels.
	double delta = 0.0005;
	
	/**
	 * Returns how many cascade levels were actually trained.
	 *
	 * @return the number of base classifiers built so far
	 */
	public int getActualClassifiersLength(){
		return m_NumIterationsPerformed;
	}
	
	/**
	 * Trains the cascade: each level trains one copy of the base classifier on
	 * an 80/20 split of the (feature-extended) data produced by the previous
	 * level, then extends the feature space with that level's predicted class
	 * distribution. Training stops early when the hold-out error changes by
	 * less than {@code delta} between consecutive levels.
	 *
	 * @param data the training instances
	 * @throws Exception if a base classifier cannot be built
	 */
	public void buildClassifier(Instances data) throws Exception {
		super.buildClassifier(data); // allocates m_Classifiers from m_NumIterations
		iterInstances = new Instances[super.m_Classifiers.length];
		Instances newData = new Instances(data);
		newData.deleteWithMissingClass();
		m_zeroR = new ZeroR();
		m_zeroR.buildClassifier(newData);
		// Only the class attribute present: fall back to the ZeroR model.
		if (newData.numAttributes() == 1) {
			System.err.println("Cannot build model (only class attribute present in data!), " + "using ZeroR model instead!");
			m_SuitableData = false;
			return;
		} else {
			m_SuitableData = true;
		}
		m_NumIterationsPerformed = 0;
		if (m_Classifiers.length == 0) {
			// Nothing to train (zero iterations configured); the do-while below
			// would otherwise index m_Classifiers[0] and throw.
			return;
		}
		double error = 1.0;
		double errorBefore;
		Instances iterResult = newData;
		do {
			// 80/20 train/validation split for this cascade level.
			FoldCreator fc = new FoldCreator(0.8);
			fc.deleteFirstAtt(false);
			Instances [] parts = fc.createSplit(iterResult, 10);
			iterInstances[m_NumIterationsPerformed] = parts[0];
			m_Classifiers[m_NumIterationsPerformed].buildClassifier(parts[0]);
			errorBefore = error;
			error = getError(m_Classifiers[m_NumIterationsPerformed], parts[1]);
			// Early stop: hold-out error barely changed between levels.
			if(Math.abs(error - errorBefore) < this.delta){
				log.error("Error - errorBefore too small current error {} ,before error {} delta {}",error,errorBefore,delta);
				break;
			}
			// Build the next level's training set by extending the feature
			// space with this level's class distribution.
			iterResult = extend(m_Classifiers[m_NumIterationsPerformed], iterResult);
			m_NumIterationsPerformed++;
		} while (m_NumIterationsPerformed < m_Classifiers.length);
	}
	
	/**
	 * Measures the misclassification rate of {@code c} on {@code data}: the
	 * fraction of instances whose predicted class (arg-max of the class
	 * distribution) differs from their actual class value.
	 *
	 * @param c    the classifier to evaluate
	 * @param data the evaluation instances
	 * @return the error rate in [0, 1]
	 * @throws Exception if prediction fails
	 */
	private double getError(Classifier c,Instances data) throws Exception {
		int total = data.numInstances();
		int hits = 0;
		for (int i = 0; i < total; i++) {
			Instance inst = data.instance(i);
			double[] distribution = c.distributionForInstance(inst);
			if (Utils.maxIndex(distribution) == (int) inst.classValue()) {
				hits++;
			}
		}
		return 1.0 - ((double) hits / (double) total);
	}
	
	/**
	 * Builds the training set for the next cascade level: each instance keeps
	 * its original predictive attributes, followed by the class distribution
	 * predicted by {@code c}, followed by the class value.
	 * NOTE(review): the copy logic assumes the class attribute is the LAST
	 * attribute of {@code data} (it copies the first numAttributes-1 values as
	 * features) — confirm callers guarantee this.
	 *
	 * @param c    the classifier whose predictions extend the feature space
	 * @param data the current level's data
	 * @return a new dataset with the extended feature space
	 * @throws Exception if prediction fails
	 */
	private Instances extend(Classifier c, Instances data) throws Exception {
		// presumably initInstancesWithParmas creates an empty dataset with the given
		// number of numeric attributes plus the supplied class attribute — TODO confirm.
		Instances newData = DatasetsUtil.initInstancesWithParmas("cas_extened_dataset",data.numAttributes() -1 + data.numClasses(), "cas_extended_", data.numInstances(), true,(Attribute) data.classAttribute().copy("cas_extend_target"));
		int numInst = data.numInstances();
		for (int i = 0; i < numInst; i++) {
			Instance in = data.instance(i);
			double[] inst = in.toDoubleArray();
			double[] dist = c.distributionForInstance(in);
			double[] vals = new double[newData.numAttributes()];
			// Copy the original attributes, excluding the class value.
			System.arraycopy(inst, 0, vals, 0, inst.length - 1);
			// Append the predicted class distribution.
			System.arraycopy(dist, 0, vals, inst.length-1, dist.length);
			// Set the class value as the last attribute.
			vals[vals.length-1] = in.classValue();
			DenseInstance di = new DenseInstance(1.0, vals);
			di.setDataset(newData);
			newData.add(di);
		}
		return newData;
	}
	
	/**
	 * Classifies an instance by threading it through the trained cascade and
	 * returning the arg-max of the final level's class distribution. Falls
	 * back to the ZeroR model when no usable cascade was built.
	 * NOTE: the input instance's dataset reference is re-assigned while it is
	 * mapped through the per-level headers.
	 *
	 * @param inst the instance to classify
	 * @return the predicted class index
	 * @throws Exception if a base classifier fails to predict
	 */
	public double classifyInstance(Instance inst) throws Exception {
		double prediction = m_zeroR.classifyInstance(inst);
		// Unusable data, or no cascade level trained: use the ZeroR fallback.
		// (The latter guard prevents Utils.maxIndex(null) below.)
		if (!m_SuitableData || m_NumIterationsPerformed == 0) {
			return prediction;
		}
		Instance ptr = inst;
		double dist[] = null;
		for (int i = 0; i < m_NumIterationsPerformed; i++) {
			ptr.setDataset(iterInstances[i]);
			dist = m_Classifiers[i].distributionForInstance(ptr);
			// No next level to feed: skip the (discarded) feature-space extension.
			if (i == m_NumIterationsPerformed - 1) {
				continue;
			}
			ptr = extendInstanceFeatureSpance(ptr, iterInstances[i], iterInstances[i + 1], dist);
		}
		return Utils.maxIndex(dist);
	}
	/**
	 * Maps an instance into the next level's feature space: original non-class
	 * attributes, then the supplied class distribution, then the class value.
	 * Returns the instance unchanged when there is no next-level header.
	 */
	Instance extendInstanceFeatureSpance(Instance original, Instances originalHeader,Instances extnedInstances,double []dist){
		if(extnedInstances == null){
			return original;
		}
		double[] src = original.toDoubleArray();
		int featureCount = src.length - 1; // everything except the class value
		double[] extended = new double[featureCount + dist.length + 1];
		System.arraycopy(src, 0, extended, 0, featureCount);
		System.arraycopy(dist, 0, extended, featureCount, dist.length);
		extended[extended.length - 1] = original.classValue();
		DenseInstance mapped = new DenseInstance(1.0, extended);
		mapped.setDataset(extnedInstances);
		return mapped;
	}

//	private Instances residualReplace(Instances data, Classifier c, boolean useShrinkage) throws Exception {
//		double pred, residual;
//		Instances newInst = new Instances(data);
//
//		for (int i = 0; i < newInst.numInstances(); i++) {
//			pred = c.classifyInstance(newInst.instance(i));
//			if (useShrinkage) {
//				pred *= getShrinkage();
//			}
//			residual = newInst.instance(i).classValue() - pred;
//			newInst.instance(i).setClassValue(residual);
//		}
//		return newInst;
//	}

	/**
	 * Returns an enumeration of the additional measure names.
	 *
	 * @return an enumeration of the measure names
	 */
	public Enumeration<String> enumerateMeasures() {
		// Typed collections replace the raw Vector/Enumeration of the original.
		Vector<String> newVector = new Vector<String>(1);
		newVector.addElement("measureNumIterations");
		return newVector.elements();
	}

	/**
	 * Returns the value of the named measure.
	 *
	 * @param additionalMeasureName
	 *            the name of the measure to query for its value
	 * @return the value of the named measure
	 * @throws IllegalArgumentException
	 *             if the named measure is not supported
	 */
	public double getMeasure(String additionalMeasureName) {
		if (additionalMeasureName.equalsIgnoreCase("measureNumIterations")) {
			return measureNumIterations();
		}
		// Copy-paste fix: report this class, not AdditiveRegression.
		throw new IllegalArgumentException(additionalMeasureName + " not supported (CasForest)");
	}

	/**
	 * Returns the number of cascade iterations (base classifier models) that
	 * were actually completed during training.
	 *
	 * @return the completed iteration count
	 */
	public double measureNumIterations() {
		return (double) m_NumIterationsPerformed;
	}

	/**
	 * Returns a textual description of the classifier.
	 *
	 * @return a description of the classifier as a string
	 */
	public String toString() {
		// Not built yet: the original checked m_NumIterations (a configuration
		// value the constructors always set to >= 1), so this branch never
		// fired; checking m_zeroR detects an actual un-built model.
		if (m_zeroR == null) {
			return "Classifier hasn't been built yet!";
		}
		// only ZeroR model?
		if (!m_SuitableData) {
			StringBuilder buf = new StringBuilder();
			String shortName = this.getClass().getName().replaceAll(".*\\.", "");
			buf.append(shortName + "\n");
			buf.append(shortName.replaceAll(".", "=") + "\n\n");
			buf.append("Warning: No model could be built, hence ZeroR model is used:\n\n");
			buf.append(m_zeroR.toString());
			return buf.toString();
		}
		StringBuilder text = new StringBuilder();
		// Copy-paste fix: report this classifier's own name, not "Additive Regression".
		text.append("CasForest\n\n");
		text.append("ZeroR model\n\n" + m_zeroR + "\n\n");
		text.append("Base classifier " + getClassifier().getClass().getName() + "\n\n");
		text.append("" + m_NumIterationsPerformed + " models generated.\n");
		for (int i = 0; i < m_NumIterationsPerformed; i++) {
			text.append("\nModel number " + i + "\n\n" + m_Classifiers[i] + "\n");
		}
		return text.toString();
	}

	/**
	 * Returns the revision string.
	 *
	 * @return the revision
	 */
	public String getRevision() {
		final String revision = "$Revision: 8034 $";
		return RevisionUtils.extract(revision);
	}

	/**
	 * Main method for testing this class.
	 *
	 * @param argv
	 *            should contain the following arguments: -t training file [-T
	 *            test file] [-c class index]
	 */
	public static void main(String[] argv) {
		// NOTE(review): intentionally disabled; re-enable to run from the command line.
		// runClassifier(new CascadeForest(), argv);
	}
	/**
	 * Returns paper reference information for this classifier.
	 * Copy-paste fix: the previous citation (Friedman, "Stochastic Gradient
	 * Boosting") belongs to AdditiveRegression; the cascade-forest idea this
	 * class implements comes from Zhou and Feng's gcForest.
	 *
	 * @return the technical information about this classifier
	 */
	public TechnicalInformation getTechnicalInformation() {
		TechnicalInformation result = new TechnicalInformation(Type.INPROCEEDINGS);
		result.setValue(Field.AUTHOR, "Zhi-Hua Zhou and Ji Feng");
		result.setValue(Field.YEAR, "2017");
		result.setValue(Field.TITLE, "Deep Forest: Towards an Alternative to Deep Neural Networks");
		result.setValue(Field.BOOKTITLE, "Proceedings of the 26th International Joint Conference on Artificial Intelligence");
		return result;
	}
	/**
	 * Returns an enumeration describing the available options: the shrinkage
	 * rate (-S) followed by the options of the superclass.
	 *
	 * @return an enumeration of all the available options
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public Enumeration<Option> listOptions() {
		// Typed collections replace the raw Vector/Enumeration of the original.
		Vector<Option> newVector = new Vector<Option>(4);
		newVector.addElement(
				new Option("\tSpecify shrinkage rate. " + "(default = 1.0, ie. no shrinkage)\n", "S", 1, "-S"));
		Enumeration enu = super.listOptions();
		while (enu.hasMoreElements()) {
			newVector.addElement((Option) enu.nextElement());
		}
		return newVector.elements();
	}

	/**
	 * Parses the option list: -S sets the shrinkage rate; all remaining
	 * options are handed to the superclass.
	 *
	 * @param options the list of options as an array of strings
	 * @throws Exception if an option is not supported
	 */
	public void setOptions(String[] options) throws Exception {
		String shrinkageOption = Utils.getOption('S', options);
		if (!shrinkageOption.isEmpty()) {
			setShrinkage(Double.parseDouble(shrinkageOption));
		}
		super.setOptions(options);
	}

	/**
	 * Gets the current settings of the classifier: the shrinkage rate (-S)
	 * followed by the superclass options.
	 *
	 * @return an array of strings suitable for passing to setOptions()
	 */
	public String[] getOptions() {
		String[] superOptions = super.getOptions();
		String[] options = new String[superOptions.length + 2];
		options[0] = "-S";
		options[1] = "" + getShrinkage();
		System.arraycopy(superOptions, 0, options, 2, superOptions.length);
		// Dead-code fix: the original padded trailing slots with "" in a while
		// loop that could never run — the array is already exactly full here.
		return options;
	}

	/**
	 * Returns the tip text for the shrinkage property.
	 *
	 * @return tip text suitable for displaying in the explorer/experimenter GUI
	 */
	public String shrinkageTipText() {
		return "Shrinkage rate. Smaller values help prevent overfitting and "
				+ "have a smoothing effect (but increase learning time). "
				+ "Default = 1.0, ie. no shrinkage.";
	}

	
	/**
	 * Returns a string describing this classifier for GUI display.
	 *
	 * @return a description of the classifier
	 */
	public String globalInfo() {
		// Fix: the stub returned "", leaving the GUI description blank.
		return "Cascade-forest style meta classifier: each level trains a copy of "
				+ "the base classifier and extends the feature space with the "
				+ "predicted class distribution; training stops early when the "
				+ "hold-out error changes by less than delta between levels.";
	}

	/**
	 * Builds a cascade of up to {@code maxLen} TSF base classifiers.
	 * NOTE(review): this constructor uses delta = 0.005 while the others use
	 * 0.0005 — confirm the tenfold difference is intentional.
	 */
	public CasForest(int maxLen) {
		this(new TSF(),maxLen,0.005);
	}
	/** Default cascade: up to 5 TSF levels, delta = 0.0005. */
	public CasForest() {
		this(new TSF(),5,0.0005);
		//this.epision = epision;
	}
	/** Cascade of up to 5 copies of the supplied base classifier, delta = 0.0005. */
	public CasForest(Classifier c){
		this(c,5,0.0005);
	}
	/**
	 * Full constructor.
	 *
	 * @param classifier       the base classifier copied at each cascade level
	 * @param classifierLength the maximum number of cascade levels
	 * @param delta            minimum error change required to keep adding levels
	 */
	public CasForest(Classifier classifier , int classifierLength,double delta) {
		m_Classifier = classifier;
		this.m_NumIterations = classifierLength;
		this.delta = delta;
		
	}
	/**
	 * Returns the fully qualified class name of the default base classifier.
	 *
	 * @return the default classifier class name
	 */
	protected String defaultClassifierString() {
		// Fix: the "xxx" placeholder named no real class; the constructors
		// default to TSF, so report that class here.
		return "timeseriesweka.classifiers.TSF";
	}
	/**
	 * Sets the shrinkage rate.
	 *
	 * @param l the shrinkage rate
	 */
	public void setShrinkage(double l) {
		this.m_shrinkage = l;
	}
	/**
	 * Gets the shrinkage rate.
	 *
	 * @return the shrinkage rate
	 */
	public double getShrinkage() {
		return this.m_shrinkage;
	}

	/**
	 * Returns this classifier's capabilities.
	 * Fix: the cascade predicts a class index via the arg-max of a class
	 * distribution (see classifyInstance/getError), i.e. it performs nominal
	 * classification, but the copy-pasted set enabled only numeric/date
	 * classes — Weka would reject classification datasets. NOMINAL_CLASS is
	 * added; the original capabilities are kept for backward compatibility.
	 *
	 * @return the capabilities of this classifier
	 */
	public Capabilities getCapabilities() {
		Capabilities result = super.getCapabilities();
		result.disableAllClasses();
		result.disableAllClassDependencies();
		result.enable(Capability.NOMINAL_CLASS);
		result.enable(Capability.NUMERIC_CLASS);
		result.enable(Capability.DATE_CLASS);
		return result;
	}

}
