package maxEntClassifier;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashSet;
import opennlp.maxent.BasicEventStream;
import opennlp.maxent.GIS;
import opennlp.maxent.GISModel;
import opennlp.maxent.PlainTextByLineDataStream;
import opennlp.maxent.io.GISModelWriter;
import opennlp.maxent.io.SuffixSensitiveGISModelWriter;
import opennlp.model.AbstractModel;
import opennlp.model.EventStream;
import opennlp.model.GenericModelReader;
import featureSelection.FeatureSelection;
import featureSelection.FeatureStore;

/**
 * Maximum-entropy text classifier backed by OpenNLP's GIS trainer.
 *
 * <p>Workflow (see {@link #main(String[])}): select features on the training
 * set, publish them via {@code R.featureSet}, then train a {@link GISModel},
 * persist it to {@code modelFile}, and evaluate on the test set via the
 * inherited {@code test()}.
 *
 * <p>NOTE(review): not thread-safe — the trained model is held in a public
 * static field shared by all instances.
 */
public class MaxEntClassifier extends Classifier {

	/**
	 * Manual debugging entry: prints per-feature weights of one fixed example
	 * sentence under two saved models, separated by two blank lines.
	 *
	 * <p>NOTE(review): this no-arg overload is NOT the JVM entry point (the JVM
	 * requires a {@code String[]} parameter); it must be invoked explicitly.
	 */
	public static void main() throws Exception {
		String sentence = "!_chair !_prerequisit !_homework !_appoint !_midterm !_game !_handout !_resum !_vita wisc project peopl miron septemb modifi student";
		outputWeights(sentence, "./out/model87.96.txt");
		System.out.println();
		System.out.println();
		outputWeights(sentence, "./out/model89.39.txt");
	}

	/**
	 * Entry point: configures feature selection for the webkb data set, runs
	 * feature selection on the training file, then trains and evaluates the
	 * classifier.
	 *
	 * @param args unused
	 * @throws Exception propagated from training / evaluation I O
	 */
	public static void main(String[] args) throws Exception {

		// Feature-selection configuration (hand-tuned for the webkb data set).
		FeatureSelection.ratio = 0.05;      // keep the top 5% of candidate features
		FeatureSelection.featureCount = -1; // -1: no absolute cap on feature count
		String dataSetDir = "./data/webkb/";
		FeatureStore.f_beta = 0.5;          // beta of the F-measure used to score features
		U.power = 2;                        // NOTE(review): semantics defined in U — confirm there

		String trainSet = dataSetDir + "train.txt";
		String testSet = dataSetDir + "test.txt";

		FeatureSelection fs = new FeatureSelection(trainSet);
		HashSet<String> featureSet = fs.multiSortFeatureSelection();

		// Publish the selected features so the rest of the pipeline filters on them.
		R.featureSet = featureSet;

		System.out.println("feature count: " + featureSet.size() + "\n");

		MaxEntClassifier c = new MaxEntClassifier(trainSet, testSet);
		c.train();
		c.test();
	}

	/**
	 * Prints every (predicate, label) pair whose learned weight is negative,
	 * one per line as {@code predicate : label}.
	 *
	 * @param fs    feature-selection state supplying predicate and label sets
	 * @param model trained model to read weights from
	 */
	public static void printminusFeature(FeatureSelection fs, GISModel model) {
		for (String predicate : fs.fs.predicateCount.keySet()) {
			for (String label : fs.fs.labelCount.keySet()) {
				if (model.getFeatureWeight(predicate, label) < 0) {
					System.out.println(predicate + " : " + label);
				}
			}
		}
	}

	/** Most recently trained/loaded model; set by {@link #train()} and {@link #loadModel()}. */
	public static GISModel model;

	/**
	 * Loads a persisted GIS model from {@code modelFile} and prints the most
	 * likely outcome name for the space-separated features in {@code sentence}.
	 *
	 * @param sentence  space-separated feature tokens
	 * @param modelFile path to a persisted GIS model
	 * @throws IOException if the model file cannot be read
	 */
	public static void inference(String sentence, String modelFile)
			throws IOException {
		// Local name kept distinct from the static 'model' field: the original
		// shadowed it, which invited accidental reliance on the wrong model.
		GISModel loaded = (GISModel) new GenericModelReader(new File(modelFile))
				.getModel();
		double[] outcomes = loaded.eval(sentence.split(" "));
		System.out.println(loaded.outcomeNames[argmax(outcomes)]);
	}

	/**
	 * Index of the largest value in {@code values}; returns 0 when every value
	 * is {@code <= 0} (safe here because eval() yields probabilities).
	 */
	private static int argmax(double[] values) {
		int best = 0;
		double bestValue = 0;
		for (int i = 0; i < values.length; i++) {
			if (bestValue < values[i]) {
				bestValue = values[i];
				best = i;
			}
		}
		return best;
	}

	/**
	 * @param _trainSet path to the training data file
	 * @param _testSet  path to the test data file
	 */
	public MaxEntClassifier(String _trainSet, String _testSet) {
		super(_trainSet, _testSet);
		modelFile = "./out/model.txt";
		testOut = "./out/testOut.txt";
	}

	/**
	 * Classifies one example by evaluating the shared static model and
	 * returning the name of the highest-probability outcome.
	 *
	 * @param features feature tokens of the example
	 * @return the predicted outcome name
	 * @throws IOException declared for interface compatibility
	 */
	@Override
	public String inference(String[] features) throws IOException {
		double[] outcomes = model.eval(features);
		return model.outcomeNames[argmax(outcomes)];
	}

	/**
	 * Reads the persisted model from {@code modelFile} into the shared static
	 * {@code model} field and exposes its outcome names to the superclass.
	 */
	@Override
	public void loadModel() throws IOException {
		model = (GISModel) new GenericModelReader(new File(modelFile))
				.getModel();
		outcomeNames = model.outcomeNames;
	}

	/**
	 * Trains a GIS model on {@code trainSet}, persists it to {@code modelFile},
	 * then reloads it via {@link #loadModel()}.
	 *
	 * @throws IOException if the training data cannot be read or the model
	 *                     cannot be written
	 */
	@Override
	public void train() throws IOException {

		FileReader datafr = new FileReader(new File(trainSet));
		try {
			EventStream es = new BasicEventStream(new PlainTextByLineDataStream(
					datafr));

			GIS.SMOOTHING_OBSERVATION = SMOOTHING_OBSERVATION;
			// Named 'trained' (not 'model') to avoid shadowing the static field.
			AbstractModel trained = GIS.trainModel(es, USE_SMOOTHING);
			GISModelWriter writer = new SuffixSensitiveGISModelWriter(trained,
					new File(modelFile));
			writer.persist();
		} finally {
			// The original leaked this reader; always release it.
			datafr.close();
		}
		loadModel();
	}

	/**
	 * Prints the learned weight of each feature of {@code sentence} under the
	 * given model, followed by their sum. The LAST token of {@code sentence}
	 * is interpreted as the gold label; all preceding tokens are predicates.
	 *
	 * @param sentence space-separated predicates followed by the label token
	 * @param model    path to a persisted GIS model
	 * @throws IOException if the model file cannot be read
	 */
	public static void outputWeights(String sentence, String model)
			throws IOException {
		GISModel m = (GISModel) new GenericModelReader(new File(model))
				.getModel();

		String[] tokens = sentence.split(" ");
		String label = tokens[tokens.length - 1];

		// Map each predicate to its context id; -1 marks a predicate that was
		// never seen at training time.
		int[] contexts = new int[tokens.length - 1];
		for (int i = 0; i < contexts.length; i++) {
			Integer ci = m.pmap.get(tokens[i]);
			contexts[i] = ci == null ? -1 : ci;
		}

		int y = m.getOutcomeId(label);
		double sum = 0;
		for (int i = 0; i < contexts.length; i++) {
			// Guard the -1 sentinel: the original passed it straight into
			// getFeatureWeight(int, int), indexing out of bounds for unknown
			// predicates. An unseen predicate contributes weight 0.
			double weight = contexts[i] < 0 ? 0 : m.getFeatureWeight(contexts[i], y);
			sum += weight;
			System.out.println(tokens[i] + "\t" + weight);
		}
		System.out.println();
		System.out.println("sum: " + sum);
	}

	/** Smoothing constant handed to GIS when {@link #USE_SMOOTHING} is on. */
	public static double SMOOTHING_OBSERVATION = 0.1;
	/** Whether GIS training applies smoothing. */
	public static boolean USE_SMOOTHING = false;

}
