package main;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

import edu.washington.cs.knowitall.extractor.conf.BooleanFeatureSet;
import edu.washington.cs.knowitall.extractor.conf.LabeledBinaryExtraction;
import edu.washington.cs.knowitall.extractor.conf.LabeledBinaryExtractionReader;
import edu.washington.cs.knowitall.extractor.conf.ReVerbFeatures;
import edu.washington.cs.knowitall.extractor.conf.WekaDataSet;
import edu.washington.cs.knowitall.nlp.extraction.ChunkedBinaryExtraction;
import weka.classifiers.functions.Logistic;
import weka.core.SerializationHelper;
import weka.core.converters.ArffSaver;

/**
 * Used to train the ReVerb confidence function using the features described
 * by <code>ReVerbFeatures</code>. Given a set of <code>LabeledBinaryExtraction</code>
 * instances, this class featurizes them and trains a logistic regression classifier
 * using Weka's <code>Logistic</code> class.
 *
 * The <code>main</code> method batch-trains one model per dataset/split pair
 * configured in <code>Chunking</code> and saves each resulting model to disk;
 * command-line arguments are ignored.
 *
 * @author afader
 *
 */
public class ReVerbClassifierTrainer {

	// NOTE: removed unused constant NATURAL_DISASTER (dead code, never referenced).

	// Feature set used to featurize extractions into boolean feature vectors.
	private BooleanFeatureSet<ChunkedBinaryExtraction> featureSet;
	// The trained Weka logistic regression model.
	private Logistic classifier;
	// The featurized training data.
	private WekaDataSet<ChunkedBinaryExtraction> dataSet;

	/**
	 * Constructs and trains a new Logistic classifier using the given examples.
	 * @param examples the labeled extractions to featurize and train on
	 * @throws Exception if Weka fails to build the classifier
	 */
	public ReVerbClassifierTrainer(Iterable<LabeledBinaryExtraction> examples) throws Exception {
		ReVerbFeatures feats = new ReVerbFeatures();
		featureSet = feats.getFeatureSet();
		createDataSet(examples);
		train();
	}

	/**
	 * @return the data set used to train the classifier
	 */
	public WekaDataSet<ChunkedBinaryExtraction> getDataSet() {
		return dataSet;
	}

	/**
	 * @return the trained classifier.
	 */
	public Logistic getClassifier() {
		return classifier;
	}

	/**
	 * Featurizes the examples into a Weka data set named "train",
	 * labeling positive examples 1 and negative examples 0.
	 */
	private void createDataSet(Iterable<LabeledBinaryExtraction> examples) {
		dataSet = new WekaDataSet<ChunkedBinaryExtraction>("train", featureSet);
		for (LabeledBinaryExtraction extr : examples) {
			int label = extr.isPositive() ? 1 : 0;
			dataSet.addInstance(extr, label);
		}
	}

	/** Fits a Weka logistic regression model to the featurized data set. */
	private void train() throws Exception {
		classifier = new Logistic();
		classifier.buildClassifier(dataSet.getWekaInstances());
	}

	/**
	 * Batch-trains one logistic regression model per (dataset, split) pair
	 * defined by <code>Chunking</code>, reading examples from
	 * <code>&lt;dir&gt;&lt;i&gt;/&lt;dataset&gt;.reverb.train</code> and writing the
	 * serialized model to <code>&lt;dir&gt;&lt;i&gt;/&lt;dataset&gt;.reverb.model</code>.
	 * The examples must be in the format described in
	 * <code>LabeledBinaryExtractionReader</code>.
	 *
	 * NOTE(review): command-line arguments are ignored. The original
	 * usage-message branch (args.length &lt; 2) and the ARFF-export branch
	 * (args.length &gt; 2) were unreachable, because args was unconditionally
	 * reassigned to a two-element array inside the loop; both dead branches
	 * have been removed without changing behavior.
	 *
	 * @param args ignored
	 * @throws Exception if reading the examples or training a model fails
	 */
	public static void main(String[] args) throws Exception {
		String[] datasets = Chunking.Datasets;
		for (String dataset : datasets) {
			int[] splits = Chunking.getSplits(dataset);
			for (int i = 0; i < splits.length; i++) {
				// NOTE(review): the directory is keyed by the loop index i while
				// the log line prints splits[i] — preserved from the original;
				// confirm the index/value mismatch is intentional.
				System.err.println(dataset + " - " + splits[i]);
				String dir = Chunking.getDirectory(dataset) + i + "/";
				trainAndSave(dir + dataset + ".reverb.train",
						dir + dataset + ".reverb.model");
			}
		}
	}

	/**
	 * Trains a classifier on the examples in examplesPath and serializes the
	 * resulting model to modelPath.
	 */
	private static void trainAndSave(String examplesPath, String modelPath) throws Exception {
		// try-with-resources: the original leaked this stream (never closed).
		try (InputStream in = new FileInputStream(examplesPath)) {
			LabeledBinaryExtractionReader reader = new LabeledBinaryExtractionReader(in);
			ReVerbClassifierTrainer trainer = new ReVerbClassifierTrainer(reader.readExtractions());
			SerializationHelper.write(modelPath, trainer.getClassifier());
		}
	}

}
