package de.dailab.irml.hb;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;

import weka.classifiers.Classifier;
import weka.classifiers.evaluation.Evaluation;
import weka.classifiers.evaluation.output.prediction.Null;
import weka.classifiers.functions.GaussianProcesses;
import weka.classifiers.functions.KernelLogisticRegression;
import weka.classifiers.functions.Logistic;
import weka.classifiers.functions.SGD;
import weka.classifiers.functions.supportVector.Kernel;
import weka.classifiers.meta.LogitBoost;
import weka.classifiers.misc.InputMappedClassifier;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.SelectedTag;
import weka.core.converters.ArffSaver;
import weka.core.converters.CSVLoader;
import weka.core.converters.CSVSaver;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Normalize;
import weka.filters.unsupervised.attribute.Remove;
import weka.filters.unsupervised.instance.RemovePercentage;

/**
 * Utility class for the Higgs Boson machine-learning challenge built on Weka.
 *
 * Expected raw CSV column layout (0-based, as loaded by {@link CSVLoader}):
 * 0 = EventId, 1..30 = physics features, 31 = Weight, 32 = Label.
 * Feature values below -998 encode "missing" in the raw data.
 *
 * Typical flow (see {@link #main}): load training and test CSVs, train a
 * label model, then write a submission CSV of (EventId, RankOrder, Class).
 */
public class HiggsBosonTools {
	// Training data: 0=EventId, 1..30=features, 31=Weight, 32=Label.
	private Instances input;
	// Test data to predict on: 0=EventId, 1..30=features (no weight/label).
	private Instances output;
	// Submission table being filled by predict(): EventId, RankOrder, Class.
	private Instances submit;
	// Class attribute taken from the training header so the submission
	// column uses the same nominal labels as the training data.
	private Attribute claAtr;
	// NOTE(review): never assigned or read in this class — dead field kept
	// so any external reflective/serialized use is not broken.
	private HashMap<Integer, Integer> idMap;
	// Percentage handed to RemovePercentage in the test* methods.
	// NOTE(review): RemovePercentage removes the FIRST p% of instances, so
	// the "training" split is the remaining (100-p)% and the inverted
	// "test" split is p% — despite its name this is the test fraction;
	// confirm intended. Defaults to 0 (empty test split) unless
	// setTrainingRatio(...) is called.
	private double trainingRatio;

	public HiggsBosonTools() {
	}

	/**
	 * Sets the percentage (0-100) passed to {@link RemovePercentage} when
	 * splitting data in {@link #testWeightModel} and {@link #testLabelModel}.
	 * Without this call the field stays 0 and the held-out split is empty.
	 *
	 * @param trainingRatio percentage of instances removed from the
	 *            training split (and therefore used as the test split)
	 */
	public void setTrainingRatio(double trainingRatio) {
		this.trainingRatio = trainingRatio;
	}

	/**
	 * Trains {@code regressor} to predict the event weight and returns a
	 * summary of its evaluation on a held-out split.
	 *
	 * @param regressor untrained Weka scheme; built in place on the split
	 * @return Weka evaluation summary string
	 * @throws Exception if filtering, training or evaluation fails
	 */
	public String testWeightModel(Classifier regressor) throws Exception {
		// Drop EventId (0) and Label (32); Weight then lands at index 30
		// of the filtered data and becomes the (numeric) class.
		int[] indices = { 0, 32 };
		Remove attrFilter = new Remove();
		attrFilter.setAttributeIndicesArray(indices);
		attrFilter.setInputFormat(input);
		Instances filteredInput = Filter.useFilter(input, attrFilter);
		filteredInput.setClassIndex(30);

		// Split into training / test via RemovePercentage (see the
		// trainingRatio field comment for the split direction).
		System.out.println("spliting data");
		Instances training;
		Instances test;
		RemovePercentage rp = new RemovePercentage();
		rp.setPercentage(trainingRatio);
		rp.setInputFormat(filteredInput);
		training = Filter.useFilter(filteredInput, rp);
		// NOTE(review): the filter is reused with inverted selection
		// without calling setInputFormat again — RemovePercentage does its
		// work in batchFinished so this appears to take effect; verify
		// against the Weka version in use.
		rp.setInvertSelection(true);
		test = Filter.useFilter(filteredInput, rp);

		// training
		System.out.println("building model");
		regressor.buildClassifier(training);

		// evaluation
		Evaluation evaluation = new Evaluation(training);
		// Null prediction printer: suppresses per-instance output.
		Object[] out = new Object[1];
		out[0] = new Null();

		//
		System.out.println("evaluatiing model");
		evaluation.evaluateModel(regressor, test, out);
		return evaluation.toSummaryString();
	}

	/**
	 * Trains {@code regressor} to predict the class label and returns a
	 * summary of its evaluation on a held-out split.
	 *
	 * @param regressor untrained Weka scheme; built in place on the split
	 * @return Weka evaluation summary string
	 * @throws Exception if filtering, training or evaluation fails
	 */
	public String testLabelModel(Classifier regressor) throws Exception {
		// Drop EventId (0) and Weight (31); Label then lands at index 30
		// of the filtered data and becomes the class.
		int[] indices = { 0, 31 };
		Remove attrFilter = new Remove();
		attrFilter.setAttributeIndicesArray(indices);
		attrFilter.setInputFormat(input);
		Instances filteredInput = Filter.useFilter(input, attrFilter);
		filteredInput.setClassIndex(30);

		// Split into training / test via RemovePercentage (see the
		// trainingRatio field comment for the split direction).
		System.out.println("spliting data");
		Instances training;
		Instances test;
		RemovePercentage rp = new RemovePercentage();
		rp.setPercentage(trainingRatio);
		rp.setInputFormat(filteredInput);
		training = Filter.useFilter(filteredInput, rp);

		System.out.println("training: " + training.size());
		// NOTE(review): filter reused with inverted selection without a
		// second setInputFormat — same caveat as in testWeightModel.
		rp.setInvertSelection(true);
		test = Filter.useFilter(filteredInput, rp);
		System.out.println("test: " + test.size());
		// training
		System.out.println("building model");
		regressor.buildClassifier(training);

		// evaluation
		Evaluation evaluation = new Evaluation(training);

		//
		System.out.println("evaluatiing model");
		evaluation.evaluateModel(regressor, test);
		return evaluation.toSummaryString();
	}

	/**
	 * Trains a label model on the full training data and wraps it in an
	 * {@link InputMappedClassifier} so the test data's (differently laid
	 * out) attributes are mapped by name at prediction time.
	 *
	 * @param regressor untrained Weka scheme; built in place on all data
	 * @return a classifier usable directly on the loaded test Instances
	 * @throws Exception if filtering or training fails
	 */
	public Classifier trainLabelModel(Classifier regressor) throws Exception {
		// Drop EventId (0) and Weight (31); Label becomes index 30.
		System.out.println("removing id and weight");
		int[] indices = { 0, 31 };
		Remove attrFilter = new Remove();
		attrFilter.setAttributeIndicesArray(indices);
		attrFilter.setInputFormat(input);
		Instances filteredInput = Filter.useFilter(input, attrFilter);
		// nomalizer.setInputFormat(filteredInput);
		// filteredInput = Filter.useFilter(filteredInput, nomalizer);
		filteredInput.setClassIndex(30);

		System.out.println("building map");
		InputMappedClassifier predictor = new InputMappedClassifier();
		predictor.setTestStructure(output);
		predictor.setModelHeader(filteredInput);
		predictor.setClassifier(regressor);
		// NOTE(review): the inner regressor is trained directly and the
		// wrapper is never built — this relies on InputMappedClassifier
		// accepting a pre-trained delegate plus an explicit model header;
		// confirm against the Weka version in use.
		System.out.println("building model");
		regressor.buildClassifier(filteredInput);
		System.out.println("model built");

		return predictor;
	}

	/**
	 * Loads a submission-template CSV and keeps only its header structure
	 * (all rows are cleared); {@link #predict} fills in the rows.
	 *
	 * @param path CSV file with the submission columns
	 * @throws IOException on read failure
	 */
	public void loadSubmit(String path) throws IOException {

		System.out.println("Loading submit");
		File f = new File(path);
		CSVLoader loader = new CSVLoader();
		loader.setSource(f);
		System.out.println("converting submit");
		submit = loader.getDataSet();
		// Keep the attribute structure only; rows are produced by predict().
		submit.clear();
		System.out.println("submit converted");

	}

	/**
	 * Writes the filled submission Instances back out as CSV.
	 *
	 * @param path destination CSV file path
	 * @throws IOException on write failure
	 */
	public void saveSubmit(String path) throws IOException {

		System.out.println("setting submit");
		File f = new File(path);
		CSVSaver saver = new CSVSaver();
		saver.setStructure(submit);
		saver.setInstances(submit);
		saver.setFile(f);
		System.out.println("writting submit");
		saver.writeBatch();
		System.out.println("submit written");
	}

	/**
	 * Classifies every loaded test instance and appends a submission row
	 * (EventId, RankOrder, Class) per instance.
	 *
	 * Side effects: replaces the submission's class column with the
	 * training class attribute, and strips the EventId column from the
	 * test data field — so this method is single-shot per loaded test set.
	 *
	 * @param regressor trained classifier (e.g. from trainLabelModel)
	 * @throws Exception if filtering or classification fails
	 */
	public void predict(Classifier regressor) throws Exception {
		System.out.println("map built");
		// Swap the template's class column for the training-set class
		// attribute so nominal labels line up.
		submit.deleteAttributeAt(2);
		submit.insertAttributeAt(claAtr, 2);
		// Rank counters: predictions of class index 0 are ranked from the
		// top (1, 2, ...), the rest from the bottom (550000 downwards).
		// NOTE(review): which nominal value index 0 denotes ('s' vs 'b')
		// depends on the CSV's value order — confirm.
		int po = 1;
		int neg = 550000;
		// Test EventIds are assumed to start at 350000 and be contiguous.
		int start = 350000;
		int order = 0;
		// Strip EventId so the test rows match the model's feature layout.
		Remove rm = new Remove();
		rm.setInputFormat(output);
		int[] rmidx = { 0 };
		rm.setAttributeIndicesArray(rmidx);
		output = Filter.useFilter(output, rm);
		// nomalizer_test.setInputFormat(output);
		// output = Filter.useFilter(output, nomalizer_test);
		for (int i = 0; i < output.size(); i++) {
			Instance testI = output.get(i);
			double l = regressor.classifyInstance(testI);
			if (Double.compare(l, 0) == 0) {
				order = po++;
			} else {
				order = neg--;
			}
			DenseInstance ins = new DenseInstance(3);
			ins.setDataset(submit);
			ins.setValue(0, start++);
			ins.setValue(1, order);
			ins.setValue(2, l);
			submit.add(ins);
		}
	}

	/**
	 * Builds a GaussianProcesses scheme with the given kernel and noise.
	 *
	 * @param kernel kernel to use
	 * @param noise noise level passed to the scheme
	 * @return configured (untrained) classifier
	 * @throws Exception declared for API symmetry; not thrown here
	 */
	public Classifier getGaussianProcesses(Kernel kernel, double noise)
			throws Exception {
		GaussianProcesses gp = new GaussianProcesses();
		gp.setKernel(kernel);
		gp.setNoise(noise);
		gp.setDebug(false);
		return gp;
	}

	/**
	 * Builds a KernelLogisticRegression scheme with the given kernel.
	 *
	 * @param kernel kernel to use
	 * @return configured (untrained) classifier
	 */
	public Classifier getKernelLogisticRegression(Kernel kernel) {
		KernelLogisticRegression lr = new KernelLogisticRegression();
		lr.setKernel(kernel);
		return lr;

	}

	/**
	 * Builds a plain Logistic regression scheme with default options.
	 *
	 * @return configured (untrained) classifier
	 */
	public Classifier getLogisticRegression() {
		Logistic lr = new Logistic();
		return lr;
	}

	/**
	 * One-off converter: reads the training CSV, marks sentinel values
	 * (&lt; -998) as missing, normalizes instance weights per class
	 * (positive-class max halved), log-transforms the weight column, and
	 * writes the result as ARFF.
	 *
	 * @param inPath source training CSV
	 * @param outPath destination ARFF file
	 * @throws Exception on load/save failure
	 */
	public static void convertTraining(String inPath, String outPath)
			throws Exception {
		System.out.println("Loading");
		File f = new File(inPath);
		CSVLoader loader = new CSVLoader();
		loader.setSource(f);

		System.out.println("converting");
		Instances trData = loader.getDataSet();
		double maxP = 0;
		double maxN = 0;
		for (int i = 0; i < trData.size(); i++) {
			Instance dp = trData.get(i);
			double w = dp.value(31);
			double c = dp.value(32);
			// Scan feature columns 1..30 only (0 is EventId). The original
			// started at 0, needlessly testing the id column.
			for (int j = 1; j <= 30; j++) {
				if (dp.value(j) < -998)
					dp.setMissing(j);
			}
			// Track the per-class maximum weight for normalization below.
			if (c == 1) {
				if (w > maxN)
					maxN = w;
			} else {
				if (w > maxP)
					maxP = w;
			}
		}

		// Halve the divisor so the non-c==1 class weights are boosted.
		maxP = maxP / 2;

		for (int i = 0; i < trData.size(); i++) {
			Instance dp = trData.get(i);
			double c = dp.value(32);
			double w = dp.value(31);
			// Keep the log of the raw weight in the weight column while
			// the instance weight gets the class-normalized raw value.
			dp.setValue(31, Math.log(w));
			if (c == 1) {
				w = w / maxN;
			} else {
				w = w / maxP;
			}
			dp.setWeight(w);
		}

		System.out.println("saving");
		ArffSaver saver = new ArffSaver();
		saver.setInstances(trData);
		saver.setFile(new File(outPath));
		// saver.setDestination(new File(outPath));
		saver.writeBatch();
		System.out.println("converted");
	}

	/**
	 * Loads the training CSV, replacing each sentinel feature value with 0
	 * and recording it in an appended "_missing" indicator column (one per
	 * feature, at indices 33..62). Also normalizes instance weights per
	 * class and log-transforms the weight column.
	 *
	 * @param inPath source training CSV
	 * @throws Exception on load failure
	 */
	private void loadTrainingWithReplace(String inPath) throws Exception {
		System.out.println("loading training");
		File f = new File(inPath);
		CSVLoader loader = new CSVLoader();
		loader.setSource(f);

		System.out.println("converting training");
		input = loader.getDataSet();
		// Remember the class attribute for the submission file.
		claAtr = input.attribute(32);
		// Append one indicator attribute per feature after the label
		// column: feature j (1..30) maps to indicator 32+j (33..62).
		for (int k = 0; k < 30; k++) {
			Attribute atr = new Attribute(input.attribute(k + 1).name()
					+ "_missing");
			input.insertAttributeAt(atr, 33 + k);
		}

		double maxP = 0;
		double maxN = 0;
		for (int i = 0; i < input.size(); i++) {
			Instance dp = input.get(i);
			double w = dp.value(31);
			double c = dp.value(32);
			for (int j = 1; j <= 30; j++) {
				if (dp.value(j) < -998) {
					// Zero out the sentinel and flag it as missing.
					dp.setValue(j, 0);
					dp.setValue(32 + j, 1);
				} else {
					dp.setValue(32 + j, 0);
				}
			}
			if (c == 1) {
				if (w > maxN)
					maxN = w;
			} else {
				if (w > maxP)
					maxP = w;
			}
		}

		// maxP = maxP / 2;

		for (int i = 0; i < input.size(); i++) {
			Instance dp = input.get(i);
			double c = dp.value(32);
			double w = dp.value(31);
			// Log of the raw weight stays in the column; the normalized
			// raw weight becomes the Weka instance weight.
			dp.setValue(31, Math.log(w));
			if (c == 1) {
				w = w / maxN;
			} else {
				w = w / maxP;
			}
			dp.setWeight(w);
		}
		System.out.println("training converted, size: " + input.size());
	}

	/**
	 * Loads the training CSV marking sentinel feature values (&lt; -998)
	 * as Weka-missing (no indicator columns). Also normalizes instance
	 * weights per class and log-transforms the weight column.
	 *
	 * @param inPath source training CSV
	 * @throws Exception on load failure
	 */
	private void loadTraining(String inPath) throws Exception {
		System.out.println("loading training");
		File f = new File(inPath);
		CSVLoader loader = new CSVLoader();
		loader.setSource(f);

		System.out.println("converting training");
		input = loader.getDataSet();
		// Remember the class attribute for the submission file.
		claAtr = input.attribute(32);

		double maxP = 0;
		double maxN = 0;
		for (int i = 0; i < input.size(); i++) {
			Instance dp = input.get(i);
			double w = dp.value(31);
			double c = dp.value(32);
			for (int j = 1; j <= 30; j++) {
				if (dp.value(j) < -998)
					dp.setMissing(j);
			}
			if (c == 1) {
				if (w > maxN)
					maxN = w;
			} else {
				if (w > maxP)
					maxP = w;
			}
		}

		// maxP = maxP / 2;

		for (int i = 0; i < input.size(); i++) {
			Instance dp = input.get(i);
			double c = dp.value(32);
			double w = dp.value(31);
			dp.setValue(31, Math.log(w));
			if (c == 1) {
				w = w / maxN;
			} else {
				w = w / maxP;
			}
			dp.setWeight(w);
		}
		System.out.println("training converted, size: " + input.size());
	}

	/**
	 * Loads the test CSV, marking sentinel feature values (&lt; -998) as
	 * Weka-missing. Pairs with {@link #loadTraining}.
	 *
	 * @param inPath source test CSV (0=EventId, 1..30=features)
	 * @throws Exception on load failure
	 */
	public void loadTest(String inPath) throws Exception {
		System.out.println("loading test");
		File f = new File(inPath);
		CSVLoader loader = new CSVLoader();
		loader.setSource(f);
		System.out.println("converting test");
		output = loader.getDataSet();
		for (int i = 0; i < output.size(); i++) {
			Instance dp = output.get(i);
			// double w = dp.value(31);
			for (int j = 1; j <= 30; j++) {
				if (dp.value(j) < -998)
					dp.setMissing(j);
			}

		}

		System.out.println("test loaded, size: " + output.size());
	}

	/**
	 * Loads the test CSV, replacing sentinel feature values with 0 and
	 * recording them in appended "_missing" indicator columns (indices
	 * 31..60 — the test file has no weight/label, so indicators sit right
	 * after the features; InputMappedClassifier matches them to the
	 * training indicators by name). Pairs with
	 * {@link #loadTrainingWithReplace}.
	 *
	 * @param inPath source test CSV (0=EventId, 1..30=features)
	 * @throws Exception on load failure
	 */
	public void loadTestWithReplace(String inPath) throws Exception {
		System.out.println("loading test");
		File f = new File(inPath);
		CSVLoader loader = new CSVLoader();
		loader.setSource(f);
		System.out.println("converting test");
		output = loader.getDataSet();
		// Feature j (1..30) maps to indicator 30+j (31..60).
		for (int k = 0; k < 30; k++) {
			Attribute atr = new Attribute(output.attribute(k + 1).name()
					+ "_missing");
			output.insertAttributeAt(atr, 31 + k);
		}

		for (int i = 0; i < output.size(); i++) {
			Instance dp = output.get(i);
			// double w = dp.value(31);
			for (int j = 1; j <= 30; j++) {
				if (dp.value(j) < -998) {
					dp.setValue(j, 0);
					dp.setValue(30 + j, 1);
				} else {
					dp.setValue(30 + j, 0);
				}
			}

		}

		System.out.println("test loaded, size: " + output.size());
	}

	/**
	 * Entry point: args[0]=training CSV, args[1]=test CSV,
	 * args[2]=submission template CSV, args[3]=submission output CSV.
	 * Trains LogitBoost over an SGD (squared-loss) base learner and writes
	 * the predictions as a submission file.
	 */
	public static void main(String[] args) {
		try {
			HiggsBosonTools hb = new HiggsBosonTools();
			hb.loadTrainingWithReplace(args[0]);
			hb.loadTestWithReplace(args[1]);
			LogitBoost cls = new LogitBoost();
			SGD base = new SGD();
			base.setDontNormalize(false);
			base.setLossFunction(new SelectedTag(SGD.SQUAREDLOSS,
					SGD.TAGS_SELECTION));
			cls.setClassifier(base);
			// cls.setNumFolds(10);
			cls.setNumIterations(10);
			Classifier predictor = hb.trainLabelModel(cls);
			hb.loadSubmit(args[2]);
			hb.predict(predictor);
			hb.saveSubmit(args[3]);
		} catch (Exception e) {
			System.out.println("training failed");
			System.out.println(e.getMessage());
			// Keep the full cause instead of only the (possibly null)
			// message — the original swallowed the stack trace.
			e.printStackTrace();
		}
	}
}
