import java.awt.BorderLayout;
import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.evaluation.ThresholdCurve;
import weka.classifiers.meta.ClassificationViaClustering;
import weka.clusterers.Clusterer;
import weka.core.Instances;
import weka.core.Utils;
import weka.core.converters.ConverterUtils.DataSink;
import weka.core.converters.ConverterUtils.DataSource;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Remove;
import weka.gui.visualize.PlotData2D;
import weka.gui.visualize.ThresholdVisualizePanel;

/**
 * Assignment 3 driver: evaluates clustering-based classifiers (k-means, EM,
 * and k-means seeded via the Keough variable-sensitivity algorithm) on
 * PCA-reduced training data, then labels the held-out test set with a
 * 3-cluster EM model both with and without PCA.
 *
 * <p>Expects {@code train_with.arff}, {@code train_without.arff} and
 * {@code test.arff} in the working directory; writes
 * {@code test_pca_labeled.arff} and {@code test_labeled.arff}.
 */
public class Assignment3 {
	/** Number of folds used for every cross-validation run. */
	private static final int CV_FOLDS = 10;

	/** Inclusive lower bound of the cluster-count sweep. */
	private static final int MIN_CLUSTERS = 2;

	/** Exclusive upper bound of the cluster-count sweep. */
	private static final int MAX_CLUSTERS = 11;

	/**
	 * Loads the unlabeled test set from {@code test.arff}.
	 *
	 * <p>NOTE(review): unlike the training loaders, no class index is set —
	 * presumably the test data carries no class attribute; confirm against
	 * {@code test.arff}.
	 */
	public static Instances LoadTest() throws Exception {
		return DataSource.read("test.arff");
	}

	/**
	 * Loads {@code train_with.arff} and marks its last attribute as the
	 * class.
	 */
	public static Instances LoadTrainWith() throws Exception {
		final Instances data = DataSource.read("train_with.arff");
		data.setClassIndex(data.numAttributes() - 1);
		return data;
	}

	/**
	 * Loads {@code train_without.arff} and marks its last attribute as the
	 * class.
	 */
	public static Instances LoadTrainWithout() throws Exception {
		final Instances data = DataSource.read("train_without.arff");
		data.setClassIndex(data.numAttributes() - 1);
		return data;
	}

	public static void main(final String[] args) throws Exception {
		// Reuse the loaders instead of duplicating their read/setClassIndex
		// logic inline.
		final Instances train_without = LoadTrainWithout();
		final Instances train_with = LoadTrainWith();
		final Instances test = LoadTest();

		// Strip the class attribute (Remove uses 1-based indices) so PCA
		// sees only the feature columns.
		Remove filter = new Remove();
		filter.setAttributeIndices("" + (train_without.classIndex() + 1));
		filter.setInputFormat(train_without);
		final Instances train_without_a = Filter.useFilter(train_without,
				filter);
		final Instances train_with_a = Filter.useFilter(train_with, filter);

		// PCA-reduce everything.
		final Instances train_without_a_pca = Part1Num1.pca(train_without_a);
		final Instances train_with_a_pca = Part1Num1.pca(train_with_a);
		final Instances test_pca = Part1Num1.pca(test);

		// Re-attach the class attribute: "1-classIndex" (1-based) drops all
		// feature columns, leaving only the class, which is then merged back
		// onto the PCA-reduced features as the last attribute.
		filter = new Remove();
		filter.setAttributeIndices("1-" + train_without.classIndex());
		filter.setInputFormat(train_without);
		final Instances train_without_pca = Instances.mergeInstances(
				train_without_a_pca, Filter.useFilter(train_without, filter));
		train_without_pca.setClassIndex(train_without_pca.numAttributes() - 1);
		// DataSink.write("train_without_pca.arff", train_without_pca);
		final Instances train_with_pca = Instances.mergeInstances(
				train_with_a_pca, Filter.useFilter(train_with, filter));
		train_with_pca.setClassIndex(train_with_pca.numAttributes() - 1);
		// DataSink.write("train_with_pca.arff", train_with_pca);

		// Sweep cluster counts for k-means as a classifier.
		for (int k = MIN_CLUSTERS; k < MAX_CLUSTERS; k++) {
			final Part1Num2 p1n2 = new Part1Num2(train_without_a_pca);
			p1n2.skm.setNumClusters(k);
			p1n2.skm.buildClusterer(train_without_a_pca);
			evaluateClusterer(p1n2.skm, train_without_pca, train_with_pca);
		}
		// Sweep cluster counts for EM as a classifier.
		for (int k = MIN_CLUSTERS; k < MAX_CLUSTERS; k++) {
			final Part1Num2 p1n2 = new Part1Num2(train_without_a_pca);
			p1n2.em.setNumClusters(k);
			p1n2.em.buildClusterer(train_without_a_pca);
			evaluateClusterer(p1n2.em, train_without_pca, train_with_pca);
		}
		// Sweep k-means seeded through the Keough variable-sensitivity
		// algorithm (sensitivity fixed at 0.25).
		for (int k = MIN_CLUSTERS; k < MAX_CLUSTERS; k++) {
			final Part1Num2 p1n2 = new Part1Num2(train_without_a_pca);
			p1n2.skm.setNumClusters(k);
			p1n2.skm.buildClusterer(Part1Num2
					.KeoughVariableSensitivityAlgorithm(new Instances(
							train_without_a_pca), k, .25));
			evaluateClusterer(p1n2.skm, train_without_pca, train_with_pca);
		}
		// evaluate clustream as classifier? how? error

		// Label the test set with a 3-cluster EM model on PCA features.
		Part1Num2 p1n2 = new Part1Num2(train_without_a_pca);
		p1n2.em.setNumClusters(3);
		p1n2.em.buildClusterer(train_without_a_pca);
		ClassificationViaClustering cvc = new ClassificationViaClustering();
		cvc.setClusterer(p1n2.em);
		cvc.buildClassifier(train_without_pca);
		final Instances test_pca_labeled = Helpers.ClassifyInstances(cvc,
				test_pca);
		DataSink.write("test_pca_labeled.arff", test_pca_labeled);

		// Label the test set with a 3-cluster EM model on raw features.
		p1n2 = new Part1Num2(train_without_a);
		p1n2.em.setNumClusters(3);
		p1n2.em.buildClusterer(train_without_a);
		cvc = new ClassificationViaClustering();
		cvc.setClusterer(p1n2.em);
		cvc.buildClassifier(train_without);
		final Instances test_labeled = Helpers.ClassifyInstances(cvc, test);
		DataSink.write("test_labeled.arff", test_labeled);
	}

	/**
	 * Wraps a pre-built clusterer in a {@link ClassificationViaClustering}
	 * classifier, trains it on {@code train}, cross-validates it on
	 * {@code evalData}, pops up the ROC curve and prints the confusion
	 * matrix and AUC for class 0. Extracted from three previously duplicated
	 * loop bodies in {@link #main}.
	 *
	 * @param clusterer an already-built clusterer to classify with
	 * @param train     labeled data the wrapper classifier is trained on
	 * @param evalData  labeled data used for cross-validation
	 * @throws Exception if training or evaluation fails
	 */
	private static void evaluateClusterer(final Clusterer clusterer,
			final Instances train, final Instances evalData) throws Exception {
		final ClassificationViaClustering cvc = new ClassificationViaClustering();
		cvc.setClusterer(clusterer);
		cvc.buildClassifier(train);
		// NOTE(review): unseeded Random makes the fold assignment — and thus
		// the reported numbers — non-reproducible between runs; consider a
		// fixed seed if reproducibility matters.
		final Evaluation eval = new Evaluation(evalData);
		eval.crossValidateModel(cvc, evalData, CV_FOLDS, new Random());
		ShowROC(new ThresholdCurve().getCurve(eval.predictions(), 0));
		System.out.println("Confusion Matrix: "
				+ matrixprint(eval.confusionMatrix()));
		System.out.println("Area under curve: " + eval.areaUnderROC(0));
	}

	/**
	 * Formats a matrix as nested bracketed rows on a single line, e.g.
	 * {@code [[1.0, 2.0][3.0, 4.0]]} (no separator between rows — kept
	 * identical to the original output format).
	 *
	 * <p>Uses a {@link StringBuilder} instead of repeated string
	 * concatenation and no longer throws on an empty row.
	 *
	 * @param mat matrix to render; rows may be empty
	 * @return the bracketed single-line representation
	 */
	public static String matrixprint(final double[][] mat) {
		final StringBuilder sb = new StringBuilder("[");
		for (final double[] row : mat) {
			sb.append('[');
			for (int y = 0; y < row.length; y++) {
				if (y > 0)
					sb.append(", ");
				sb.append(row[y]);
			}
			sb.append(']');
		}
		return sb.append(']').toString();
	}

	/**
	 * Displays a ROC curve produced by {@link ThresholdCurve#getCurve} in a
	 * Swing frame, with the AUC in the plot title and consecutive points
	 * connected.
	 *
	 * <p>Adapted from http://weka.wikispaces.com/Generating+ROC+curve
	 *
	 * @param result threshold-curve instances to plot
	 * @throws Exception if the visualize panel rejects the plot data
	 */
	public static void ShowROC(final Instances result) throws Exception {
		// Build the plot panel, titled with the area under the curve.
		final ThresholdVisualizePanel vmc = new ThresholdVisualizePanel();
		vmc.setROCString("(Area under ROC = "
				+ Utils.doubleToString(ThresholdCurve.getROCArea(result), 4)
				+ ")");
		vmc.setName(result.relationName());
		final PlotData2D tempd = new PlotData2D(result);
		tempd.setPlotName(result.relationName());
		tempd.addInstanceNumberAttribute();
		// Connect every point to its predecessor (index 0 has none).
		final boolean[] cp = new boolean[result.numInstances()];
		for (int n = 1; n < cp.length; n++)
			cp[n] = true;
		tempd.setConnectPoints(cp);
		vmc.addPlot(tempd);
		// Show the curve in a disposable frame; closing the window only
		// disposes it, so the JVM keeps running the remaining sweeps.
		final String plotName = vmc.getName();
		final javax.swing.JFrame jf = new javax.swing.JFrame(
				"Weka Classifier Visualize: " + plotName);
		jf.setSize(500, 400);
		jf.getContentPane().setLayout(new BorderLayout());
		jf.getContentPane().add(vmc, BorderLayout.CENTER);
		jf.addWindowListener(new java.awt.event.WindowAdapter() {
			@Override
			public void windowClosing(final java.awt.event.WindowEvent e) {
				jf.dispose();
			}
		});
		jf.setVisible(true);
	}
}