package LPU;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;

import weka.classifiers.Classifier;
import weka.classifiers.bayes.BayesNet;
import weka.classifiers.evaluation.EvaluationUtils;
import weka.classifiers.evaluation.NominalPrediction;
import weka.core.FastVector;
import weka.core.Instances;
import ds.BugInfo;
import ds.ClassifierInfo;
import ds.Constants;

public class SecondStepForLPU {
	/** Positive examples P (never modified after construction). */
	private ArrayList<BugInfo> P;
	/** Reliable negatives RN; grows each round with instances predicted negative. */
	private ArrayList<BugInfo> RN;
	/** Remaining unlabeled examples Q = unlabel \ rn; shrinks each round. */
	private ArrayList<BugInfo> Q;
	/** Held-out test set used by {@link #getResult()}. */
	private ArrayList<BugInfo> T;

	/** Class label shared by the positive examples (taken from the first one). */
	private String label;

	/** Per-round ARFF paths for the P+RN training sets. */
	private ArrayList<String> trainFileName;
	/** Per-round ARFF paths for the Q prediction sets. */
	private ArrayList<String> unlabelTestFileName;
	/** ARFF path for the positive-only evaluation set. */
	private String posTestFileName;

	/**
	 * @param pos     positive examples; must be non-empty (supplies the label)
	 * @param unlabel unlabeled examples
	 * @param rn      initial reliable negatives (removed from the unlabeled pool)
	 * @param test    held-out test examples
	 */
	public SecondStepForLPU(ArrayList<BugInfo> pos, ArrayList<BugInfo> unlabel,
			ArrayList<BugInfo> rn, ArrayList<BugInfo> test) {
		P = new ArrayList<BugInfo>(pos);
		RN = new ArrayList<BugInfo>(rn);
		Q = new ArrayList<BugInfo>(unlabel);
		Q.removeAll(rn);
		T = new ArrayList<BugInfo>(test);

		this.label = P.get(0).label;

		this.trainFileName = new ArrayList<String>();
		this.unlabelTestFileName = new ArrayList<String>();
		this.posTestFileName = "data/lpu/pos.arff";
	}

	/** Writes the round's training set (P + RN) to {@code trainFileName.get(round)}. */
	public void writeTrain(int round) {
		ArrayList<BugInfo> train = new ArrayList<BugInfo>();
		train.addAll(this.P);
		train.addAll(this.RN);
		util.WekaDataWrapper.translateToArffTF(train, label,
				Constants.LPU_WordList, trainFileName.get(round));
	}

	/** Writes the current unlabeled pool Q to {@code unlabelTestFileName.get(round)}. */
	public void writeUnlabelTest(int round) {
		util.WekaDataWrapper.translateToArffTF(Q, label,
				Constants.LPU_WordList, unlabelTestFileName.get(round));
	}

	/** Writes the positive-only evaluation set P to {@code posTestFileName}. */
	public void writePosTest() {
		util.WekaDataWrapper.translateToArffTF(P, label,
				Constants.LPU_WordList, posTestFileName);
	}

	/**
	 * Loads an ARFF file into a Weka {@link Instances} object with the class
	 * attribute at index 0, closing the underlying reader.
	 */
	private Instances loadInstances(String fileName) throws Exception {
		BufferedReader reader = new BufferedReader(new FileReader(fileName));
		try {
			Instances data = new Instances(reader);
			data.setClassIndex(0); // class attribute is first, not last
			return data;
		} finally {
			reader.close(); // BUG FIX: readers were previously leaked
		}
	}

	/**
	 * Iteratively trains BayesNet classifiers: each round trains on P+RN,
	 * classifies Q, promotes instances predicted negative from Q into RN, and
	 * stops when a round promotes nothing. Returns the best classifier
	 * according to {@code ClassifierInfo.classifierComparator} (ranked by
	 * recall on the positive set), or {@code null} if any step failed.
	 */
	public Classifier BayesNetItrator() {
		int roundCount = 0;
		try {
			/** get train & test data */
			// positive test data, used to score each round's classifier
			writePosTest();
			Instances posTestData = loadInstances(posTestFileName);

			ArrayList<ClassifierInfo> classifiers = new ArrayList<ClassifierInfo>();

			while (true) {
				ClassifierInfo curClassifier = new ClassifierInfo();

				trainFileName
						.add("data/lpu/train_round" + roundCount + ".arff");
				unlabelTestFileName.add("data/lpu/unTest_round" + roundCount
						+ ".arff");

				writeTrain(roundCount);
				writeUnlabelTest(roundCount);

				// P+RN
				Instances trainData = loadInstances(trainFileName.get(roundCount));
				// Q
				Instances testData = loadInstances(unlabelTestFileName.get(roundCount));

				/** build classifier (default BayesNet options) */
				BayesNet cls = new BayesNet();
				cls.buildClassifier(trainData);
				curClassifier.cls = cls;

				/** evaluate classifier on Q; predicted 0 is positive, 1 is non. */
				EvaluationUtils eval = new EvaluationUtils();
				FastVector rnResult = eval.getTestPredictions(cls, testData);
				ArrayList<BugInfo> toRemove = new ArrayList<BugInfo>();
				for (int i = 0; i < rnResult.size(); i++) {
					NominalPrediction curPredict = (NominalPrediction) rnResult
							.elementAt(i);
					if (curPredict.predicted() == 1.0) {
						// predicted negative: promote to RN, drop from Q
						RN.add(Q.get(i));
						toRemove.add(Q.get(i));
					}
				}

				// Score this round by accuracy on the positive-only set.
				FastVector posResult = eval
						.getTestPredictions(cls, posTestData);
				int posCount = 0;
				// BUG FIX: loop bound was rnResult.size(); iterating another
				// vector's length over posResult risked an out-of-bounds
				// access (Q larger than P) or an under-count (Q smaller).
				for (int i = 0; i < posResult.size(); i++) {
					NominalPrediction curResult = (NominalPrediction) posResult
							.elementAt(i);
					if (curResult.predicted() == curResult.actual())
						posCount++;
				}
				// BUG FIX: integer division truncated the ratio to 0 (or 1),
				// making the later ranking of classifiers meaningless.
				// Assumes posRecall is a double field — confirm in ClassifierInfo.
				curClassifier.posRecall = (double) posCount / posResult.size();
				classifiers.add(curClassifier);

				if (toRemove.isEmpty())
					break;
				Q.removeAll(toRemove);
				roundCount++;
			}

			Collections.sort(classifiers,
					new ClassifierInfo.classifierComparator());
			return classifiers.get(0).cls;
		} catch (Exception e) {
			e.printStackTrace();
		}
		return null;
	}

	/**
	 * Trains via {@link #BayesNetItrator()}, evaluates the chosen classifier
	 * on the test set T, and prints "label: F-score" (class index 0 is
	 * treated as the positive class).
	 */
	public void getResult() {
		Classifier cls = BayesNetItrator();
		if (cls == null) {
			// BUG FIX: previously fell through to a NullPointerException
			// that was silently swallowed by the generic catch below.
			System.err.println("No classifier could be built for label "
					+ this.label);
			return;
		}
		util.WekaDataWrapper.translateToArffTF(T, label,
				Constants.LPU_WordList, "output/LPU/my/test.arff");
		try {
			Instances testData = loadInstances("output/LPU/my/test.arff");
			EvaluationUtils eval = new EvaluationUtils();
			FastVector result = eval.getTestPredictions(cls, testData);
			int tp = 0, tn = 0, fp = 0, fn = 0;
			for (int i = 0; i < result.size(); i++) {
				NominalPrediction cur = (NominalPrediction) result.elementAt(i);
				if (cur.predicted() == cur.actual()) {
					if (cur.predicted() == 0.0)
						tp++;
					else
						tn++;
				} else if (cur.predicted() > cur.actual()) {
					// actual positive (0), predicted negative (1)
					fn++;
				} else {
					fp++;
				}
			}
			// BUG FIX: guard degenerate confusion matrices that previously
			// produced NaN via division by zero.
			double precision = (tp + fp == 0) ? 0.0 : ((double) tp) / (tp + fp);
			double recall = (tp + fn == 0) ? 0.0 : ((double) tp) / (tp + fn);
			double fScore = (precision + recall == 0.0) ? 0.0
					: 2 * precision * recall / (precision + recall);
			System.out.print(this.label + ": " + fScore);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
