import java.io.File;

import weka.classifiers.meta.ClassificationViaClustering;
import weka.clusterers.ClusterEvaluation;
import weka.core.Instances;

/**
 * Part 1, question 3 of the assignment: runs the CluStream clustering step
 * (delegated to {@link Part1Num2}) on a training data set.
 *
 * <p>NOTE(review): the original file carried large amounts of commented-out
 * experiment wiring (loading anomaly-cleaned ARFF files via Assignment3/Helpers,
 * k-means evaluation, ClassificationViaClustering). That dead code has been
 * removed; version control retains the history. Only the CluStream call is live.
 */
public class Part1Num3 {
	/**
	 * Entry point. The original driver body was fully commented out, so this
	 * is intentionally a no-op: it depended on Assignment3/Helpers/Paths
	 * classes and local ARFF files not present in this build.
	 *
	 * @param args unused command-line arguments
	 * @throws Exception declared for signature compatibility with the
	 *     original driver code
	 */
	public static void main(final String[] args) throws Exception {
		// Intentionally empty — see class-level note.
	}

	// Unused remnant of the disabled k-means evaluation path; retained in case
	// sibling classes outside this file still reference it.
	Part1Num2 p1n2;
	// Unused result buffer; retained for the same compatibility reason.
	double[][][] res;

	/**
	 * Runs CluStream (via {@link Part1Num2#Clustream}) on the training set.
	 *
	 * @param train training instances passed to the CluStream clusterer
	 * @param test  currently unused — the evaluation path that consumed it is
	 *     disabled; the parameter is retained so existing callers keep compiling
	 * @throws Exception propagated from the underlying Weka clustering call
	 */
	public Part1Num3(Instances train, Instances test) throws Exception {
		Part1Num2.Clustream(train);
	}

}
