package exp.algorithm;

import timeseriesweka.classifiers.AbstractClassifierWithTrainingData;
import timeseriesweka.classifiers.TSF;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.slf4j.Logger;

import com.alibaba.simpleimage.analyze.sift.ImagePixelArray;
import com.alibaba.simpleimage.analyze.sift.SIFT;
import com.alibaba.simpleimage.analyze.sift.scale.KDFeaturePoint;

import exp.util.MatrixUtil;
import utilities.ClassifierTools;
import utilities.CrossValidator;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.trees.RandomForest;
import weka.classifiers.trees.RandomTree;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.TechnicalInformation;
import utilities.SaveParameterInfo;
import utilities.TrainAccuracyEstimate;
import utilities.ClassifierResults;

/**
 * SIFT-based time series classifier. Each series is rendered as an image
 * (via {@link MatrixUtil#fillImagePixelArrayRowByRowShiftX}), SIFT keypoints
 * are detected, and the 128-dimensional descriptors of all keypoints are
 * summed into a single bag-of-features vector, which is then classified
 * with Naive Bayes.
 *
 * @see TSF
 * @author Alex
 */
public class SiftBasedClassifiers2 extends AbstractClassifierWithTrainingData
		implements SaveParameterInfo, TrainAccuracyEstimate ,Serializable{
	private static final long serialVersionUID = 5895654647529660601L;
	// Static: SLF4J Loggers are not Serializable, so keeping this as a
	// non-transient instance field would break serialization of this class.
	private static final Logger log = org.slf4j.LoggerFactory.getLogger(SiftBasedClassifiers2.class);
	/** Base classifier trained on the SIFT-transformed data. */
	NaiveBayes nb = new NaiveBayes();
	/** Length of a SIFT descriptor; one numeric attribute per component. */
	int numFeatures = 128;
	/** Empty copy of the transformed training header; gives test instances
	 *  the correct dataset reference. Set by {@link #buildClassifier}. */
	Instances header ;
	/*
	 * There is no benefit from internally doing the CV for this classifier, so
	 * this is just for debugging really. Somewhat tidier
	 */
	boolean trainCV = false;
	/*
	 * If nonTrain results are overwritten with each call to buildClassifier
	 * File opened on this path.
	 */
	String trainCVPath = "";

	public SiftBasedClassifiers2() {
	}

	@Override
	public void writeCVTrainToFile(String train) {
		trainCVPath = train;
		trainCV = true;
	}

	@Override
	public boolean findsTrainAccuracyEstimate() {
		return trainCV;
	}

	@Override
	public ClassifierResults getTrainResults() {
		// trainResults is maintained by AbstractClassifierWithTrainingData.
		return trainResults;
	}

	@Override
	public String getParameters() {
		return super.getParameters();
	}

	public TechnicalInformation getTechnicalInformation() {
		// NOTE(review): citation inherited from TSF (Deng et al. 2013); it does
		// not describe this SIFT-based method — confirm the intended reference.
		TechnicalInformation result;
		result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE);
		result.setValue(TechnicalInformation.Field.AUTHOR, "H. Deng, G. Runger, E. Tuv and M. Vladimir");
		result.setValue(TechnicalInformation.Field.YEAR, "2013");
		result.setValue(TechnicalInformation.Field.TITLE,
				"A time series forest for classification and feature extraction");
		result.setValue(TechnicalInformation.Field.JOURNAL, "Information Sciences");
		result.setValue(TechnicalInformation.Field.VOLUME, "239");
		result.setValue(TechnicalInformation.Field.PAGES, "142-153");

		return result;
	}

	/**
	 * Detects SIFT keypoints in the image built from {@code arr} and sums
	 * their descriptors component-wise into one fixed-length vector.
	 * Shared by both {@code transform} overloads so the feature length is
	 * consistently {@link #numFeatures} (the original Instances-variant
	 * hard-coded 128).
	 *
	 * @param arr raw attribute values of one series (includes the class value,
	 *            as produced by {@code Instance.toDoubleArray()} — presumably
	 *            harmless to the image construction; TODO confirm)
	 * @return summed descriptor vector of length {@code numFeatures}
	 */
	private int[] extractSiftFeatures(double[] arr) {
		ImagePixelArray ipa = MatrixUtil.fillImagePixelArrayRowByRowShiftX(arr);
		SIFT sift = new SIFT();
		sift.detectFeatures(ipa); // side effect: populates the global keypoint list
		List<KDFeaturePoint> keyPoints = sift.getGlobalKDFeaturePoints();
		int[] feat = new int[numFeatures];
		for (KDFeaturePoint kp : keyPoints) {
			for (int t = 0; t < feat.length; t++) {
				feat[t] += kp.descriptor[t];
			}
		}
		return feat;
	}

	/**
	 * Transforms a whole dataset into the SIFT feature space: one numeric
	 * attribute per descriptor component plus the original nominal class.
	 * Also initialises {@link #header} with an empty copy of the result.
	 *
	 * @param data training data with a nominal class attribute
	 * @return transformed dataset, class index set to the last attribute
	 */
	Instances transform(Instances data) {
		// Build the attribute list: F0..F(numFeatures-1), then the class.
		ArrayList<Attribute> atts = new ArrayList<>(numFeatures + 1);
		for (int j = 0; j < numFeatures; j++) {
			atts.add(new Attribute("F" + j));
		}
		Attribute target = data.attribute(data.classIndex());
		ArrayList<String> vals = new ArrayList<>(target.numValues());
		for (int j = 0; j < target.numValues(); j++) {
			vals.add(target.value(j));
		}
		atts.add(new Attribute(target.name(), vals));

		Instances result = new Instances("Tree", atts, data.numInstances());
		result.setClassIndex(result.numAttributes() - 1);
		for (int i = 0; i < data.numInstances(); i++) {
			Instance inst = data.instance(i);
			int[] feat = extractSiftFeatures(inst.toDoubleArray());
			DenseInstance in = new DenseInstance(result.numAttributes());
			for (int x = 0; x < feat.length; x++) {
				in.setValue(x, feat[x]);
			}
			in.setValue(result.numAttributes() - 1, inst.classValue());
			result.add(in);
		}
		header = new Instances(result, 0);
		return result;
	}

	/**
	 * Transforms a single (test) instance into the SIFT feature space.
	 * <p>
	 * Bug fix: the original computed the feature vector but never copied it
	 * into the returned {@code DenseInstance}, so prediction always saw an
	 * empty instance. The feature values are now set; the class value is left
	 * missing, as it is unknown at prediction time.
	 *
	 * @param data instance to transform
	 * @return transformed instance attached to the training header
	 * @throws IllegalStateException if called before {@link #buildClassifier}
	 */
	Instance transform(Instance data) {
		if (header == null) {
			throw new IllegalStateException("buildClassifier must be called before classifying instances");
		}
		int[] feat = extractSiftFeatures(data.toDoubleArray());
		DenseInstance in = new DenseInstance(header.numAttributes());
		in.setDataset(header);
		for (int x = 0; x < feat.length; x++) {
			in.setValue(x, feat[x]);
		}
		return in;
	}

	/**
	 * Builds the classifier: transforms the training data into the SIFT
	 * feature space and trains the internal Naive Bayes model on it.
	 *
	 * @param data training data with a nominal class attribute
	 * @throws Exception if the underlying Naive Bayes build fails
	 */
	@Override
	public void buildClassifier(Instances data) throws Exception {
		numFeatures = 128; // SIFT descriptors are 128-dimensional
		data = transform(data);
		nb.buildClassifier(data);
	}

	@Override
	public double[] distributionForInstance(Instance ins) throws Exception {
		return nb.distributionForInstance(transform(ins));
	}

	/**
	 * Ad-hoc debugging entry point. The previous experiment code was entirely
	 * commented out (one block comment spanning the whole body, referencing
	 * local file paths), making main a runtime no-op; it has been removed.
	 */
	public static void main(String[] arg) throws Exception {
		// Intentionally empty; see Javadoc above.
	}
}
