package iitb.Model;

import gnu.trove.TIntHashSet;
import iitb.CRF.CRF;
import iitb.CRF.DataIter;
import iitb.CRF.DataSequence;
import iitb.CRF.Feature;
import iitb.CRF.FeatureGeneratorNested;
import iitb.CRF.SegmentDataSequence;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.StringTokenizer;
import java.util.Vector;

import su.FeatureSelection.FeatureSelection;
import su.FeatureSelection.FeatureStore;
import su.FeatureSelection.KeyValuePair;
import su.Ner.Names;
import su.Ner.NerDataSequence;
import su.Ner.U;
import su.NewFeatureType.CurrentCaseFeatures;
import su.NewFeatureType.LocationDictFeatures;
import su.NewFeatureType.OrganizationDictFeatures;
import su.NewFeatureType.PersonDictFeatures;
import su.NewFeatureType.PreviousWordFeatures;
import su.NewFeatureType.StateFeatures;
import su.NewFeatureType.TitleDictFeatures;
import su.NewFeatureType.TransitionFeatures;

/**
 * The FeatureGenerator is an aggregator over all these different feature types.
 * You can inherit from the FeatureGenImpl class and, after calling one of the
 * constructors that does not invoke addFeatures(), supply your own
 * implementation of the addFeatures() method. There you will typically add the
 * EdgeFeatures feature first and then the rest. So, for example, if you wanted
 * to add some parameter for each label (like a prior), you could create a new
 * FeatureTypes class that creates as many feature ids as there are labels. You
 * would then derive a new class from FeatureGenImpl with a different
 * implementation of the addFeatures() method; the rest is handled by the
 * parent class. This class is responsible for converting the string ids that
 * the FeatureTypes assign into integer ids: it makes one pass over the
 * training data to build the map of featurenames-&gt;integer id and, as a side
 * effect, counts the number of features.
 * 
 * @author Sunita Sarawagi
 * */

@SuppressWarnings("serial")
public class FeatureGenImpl implements FeatureGeneratorNested {

	public DataIter trainningSet; // training data iterator (historical misspelling of "training")
	public static boolean addWholeFeature; // when true during inference, unknown features get id Integer.MAX_VALUE instead of being skipped (see advance())
	public HashMap<String, Double> minPredicateWeight; // predicate -> most negative weight seen (capped at 0); filled by loadMinPredicateWeight()

	/**
	 * Registers the default feature types: the standard CRF features first,
	 * then the su dictionary/case feature types, each of which is handed its
	 * own (negative) base-id range advanced past the previous type's maximum
	 * feature count.
	 */
	protected void addFeatures() {
		// standard CRF feature types
		addFeature(new EdgeFeatures(this));
		addFeature(new StartFeatures(this));
		addFeature(new EndFeatures(this));
		addFeature(new WordFeatures(this, dict));

		// by su: su feature types, each with its own negative base-id range
		int nextBase = -1;

		addFeature(new CurrentCaseFeatures(this, nextBase));
		nextBase -= CurrentCaseFeatures.maxFeatureNum;

		PersonDictFeatures personDict = new PersonDictFeatures(this, nextBase);
		addFeature(personDict);
		nextBase -= personDict.maxFeatureNum;

		LocationDictFeatures locationDict = new LocationDictFeatures(this,
				nextBase);
		addFeature(locationDict);
		nextBase -= locationDict.maxFeatureNum;

		OrganizationDictFeatures orgDict = new OrganizationDictFeatures(this,
				nextBase);
		addFeature(orgDict);
		nextBase -= orgDict.maxFeatureNum;

		TitleDictFeatures titleDict = new TitleDictFeatures(this, nextBase);
		addFeature(titleDict);
		nextBase -= titleDict.maxFeatureNum;

		// addSuFeatures returns the (negative) width of the registered range
		nextBase += addSuFeatures(new PreviousWordFeatures(this, dict, nextBase));
	}

	public Vector features; // the registered FeatureTypes, scanned in order
	transient Iterator featureIter; // iterator over 'features' during a scan
	protected FeatureTypes currentFeatureType; // feature type currently being enumerated
	protected FeatureImpl featureToReturn, feature; // look-ahead feature and the instance handed to callers
	public Model model;
	int numFeatureTypes = 0;
	int totalFeatures; // size of the feature map after training/reading
	boolean _fixedTransitions = true; // false if any FeatureTypes reports non-fixed transition features
	public boolean generateOnlyXFeatures = false;
	public boolean addOnlyTrainFeatures = true; // during collection, only keep features matching the training labels
	TIntHashSet retainedFeatureTypes = new TIntHashSet(); // all features of
	// this type are
	// retained.

	transient DataSequence data; // sequence currently being scanned
	int cposEnd; // current segment end position
	int cposStart; // current segment start position
	WordsInTrain dict; // word statistics gathered from the training set

	// modified by su 1.25 2011
	public FeatureSelection featureSelection;
	public Hashtable<String, Integer> featureCountTable = new Hashtable<String, Integer>(); // feature name -> occurrence count in training data
	public FeatureStore featureStore; // feature statistics built in freezeFeatures()
	
	// end modified by su 1.25 2011

	// by su
	public Names names = new Names("./data/");
	public static String recordTxt = "./out/lambda.txt"; // output file for the feature/lambda dumps
	public static String featureCluster = "./out/featureCluster.txt"; // output file for per-label feature clusters
	Hashtable<String, Integer> YCount; // label string -> count over the training set; built by loadYCount()
	// by su

	/** Registers a feature type without forcing its features to be retained. */
	public void addFeature(FeatureTypes fType) {
		addFeature(fType, false);
	}

	/**
	 * Registers a feature type. When retainThis is true, every feature of
	 * this type survives the keepFeature() filter during id collection.
	 */
	public void addFeature(FeatureTypes fType, boolean retainThis) {
		features.add(fType);
		if (!fType.fixedTransitionFeatures()) {
			_fixedTransitions = false;
		}
		if (retainThis) {
			retainedFeatureTypes.add(fType.getTypeId() + 1);
		}
	}

	/** Injects a prebuilt word-statistics dictionary. */
	public void setDict(WordsInTrain d) {
		dict = d;
	}

	/** Returns the word dictionary, lazily creating an empty one if unset. */
	public WordsInTrain getDict() {
		if (dict == null)
			dict = new WordsInTrain();
		return dict;
	}

	/** Returns the i-th registered feature type. */
	protected FeatureTypes getFeature(int i) {
		return (FeatureTypes) features.elementAt(i);
	}

	/**
	 * Decides whether a candidate feature is kept during id collection:
	 * features of a retained type are always kept; all others must agree with
	 * the labels of the training sequence (retainFeature).
	 */
	protected boolean keepFeature(DataSequence seq, FeatureImpl f) {
		if (retainedFeatureTypes != null
				&& retainedFeatureTypes.contains(currentFeatureType.getTypeId() + 1)) {
			return true;
		}
		return retainFeature(seq, f);
	}

	/**
	 * True when the feature's label matches the sequence label at the current
	 * end position and its previous-label (if it has one, and this is not the
	 * first position) matches the label before the current start position.
	 */
	protected boolean retainFeature(DataSequence seq, FeatureImpl f) {
		if (seq.y(cposEnd) != f.y()) {
			return false;
		}
		return (cposStart == 0) || (f.yprev() < 0)
				|| (seq.y(cposStart - 1) == f.yprev());
	}

	boolean featureCollectMode = false;

	/**
	 * Maps feature identifiers to dense integer ids. While featureCollectMode
	 * is true, features that fire on the training data (and pass
	 * keepFeature()) are assigned new ids; freezeFeatures() then runs feature
	 * selection and builds the reverse id -> identifier table.
	 */
	public class FeatureMap implements Serializable {
		public Hashtable strToInt = new Hashtable(); // FeatureIdentifier -> Integer id
		public FeatureIdentifier idToName[]; // reverse lookup, filled by freezeFeatures()
		public FeatureGenImpl fgen; // owning generator

		FeatureMap(FeatureGenImpl _fgen) {
			// constructing the map switches the generator into collection mode
			featureCollectMode = true;
			fgen = _fgen;
		}

		/**
		 * Returns the id for a feature, first adding it when we are still in
		 * collection mode and it passes the training-label filter; returns -1
		 * if the feature is unknown and cannot be added.
		 */
		public int getId(FeatureImpl f) {

			int id = getId(f.identifier());

			if ((id < 0) && featureCollectMode
					&& (!addOnlyTrainFeatures || keepFeature(data, f)))
				return add(f);
			return id;
		}

		// id for an identifier key, or -1 when the key is absent
		private int getId(Object key) {
			if (strToInt.get(key) != null) {
				return ((Integer) strToInt.get(key)).intValue();
			}
			return -1;
		}

		/** Assigns the next free id to the feature, keyed by a clone of its identifier. */
		public int add(FeatureImpl feature) {
			int newId = strToInt.size();
			strToInt.put(feature.identifier().clone(), new Integer(newId));
			return newId;
		}

		/**
		 * Ends collection mode: fills the feature count table, counts labels,
		 * runs feature selection (which replaces strToInt with the selected
		 * subset) and builds the reverse idToName table.
		 */
		void freezeFeatures() throws Exception {

			implementFeatureCountTable();
			loadYCount();
			featureStore = new FeatureStore(featureCountTable,YCount);
			strToInt = new FeatureSelection(featureStore).featureSelection();

			featureCollectMode = false;
			idToName = new FeatureIdentifier[strToInt.size()];

			for (Enumeration e = strToInt.keys(); e.hasMoreElements();) {
				Object key = e.nextElement();
				idToName[getId(key)] = (FeatureIdentifier) key;
			}
			totalFeatures = strToInt.size();
		}

		/**
		 * Scans the whole training set once, collecting ids for all features
		 * that fire on it, then freezes the map. Returns the feature count.
		 * (maxMem is currently unused.)
		 */
		public int collectFeatureIdentifiers(DataIter trainData, int maxMem)
				throws Exception {
			for (trainData.startScan(); trainData.hasNext();) {
				DataSequence seq = trainData.next();
				addTrainRecord(seq);
			}

			freezeFeatures();
			return strToInt.size();
		}

		/** Writes the map: a header line, the size, then one "identifier id" line per feature. */
		public void write(PrintWriter out) throws IOException {
			out.println("******* Features ************");
			out.println(strToInt.size());
			for (Enumeration e = strToInt.keys(); e.hasMoreElements();) {
				Object key = e.nextElement();
				out.println(key + " "
						+ ((Integer) strToInt.get(key)).intValue());
			}
		}

		/** Reads a map previously written by write() and freezes it. Returns the feature count. */
		public int read(BufferedReader in) throws Exception {
			in.readLine(); // skip the "******* Features ************" header
			int len = Integer.parseInt(in.readLine());
			String line;
			for (int l = 0; (l < len) && ((line = in.readLine()) != null); l++) {
				StringTokenizer entry = new StringTokenizer(line, " ");
				FeatureIdentifier key = new FeatureIdentifier(entry.nextToken());
				int pos = Integer.parseInt(entry.nextToken());
				strToInt.put(key, new Integer(pos));
			}
			freezeFeatures();
			return strToInt.size();
		}

		public FeatureIdentifier getIdentifier(int id) {
			return idToName[id];
		}

		public String getName(int id) {
			return idToName[id].toString();
		}

		// ensure every collected feature has an entry (possibly 0) in the count table
		void implementFeatureCountTable() {
			for (Object key : strToInt.keySet()) {
				if (!featureCountTable.keySet().contains(key.toString()))
					featureCountTable.put(key.toString(), 0);
			}
		}
	};

	public FeatureMap featureMap;

	// builds the graphical model (state/transition structure) from its textual spec
	static Model getModel(String modelSpecs, int numLabels) throws Exception {
		// create model..
		return Model.getNewModel(numLabels, modelSpecs);
	}

	/** Convenience constructor: builds the model and registers the default features. */
	public FeatureGenImpl(String modelSpecs, int numLabels, DataIter _trainFile)
			throws Exception {
		this(modelSpecs, numLabels, true, _trainFile);
	}

	/** Builds the model from its spec; addFeatureNow controls whether addFeatures() runs. */
	public FeatureGenImpl(String modelSpecs, int numLabels,
			boolean addFeatureNow, DataIter _trainFile) throws Exception {
		this(getModel(modelSpecs, numLabels), numLabels, addFeatureNow,
				_trainFile);
	}

	/**
	 * Main constructor: wires up the model, trains the word dictionary on the
	 * training set and, when addFeatureNow is set, registers the default
	 * feature types.
	 */
	public FeatureGenImpl(Model m, int numLabels, boolean addFeatureNow,
			DataIter _trainFile) throws Exception {
		model = m;
		trainningSet = _trainFile;

		features = new Vector();
		featureToReturn = new FeatureImpl();
		feature = new FeatureImpl();
		featureMap = new FeatureMap(this);

		dict = new WordsInTrain();
		dict.train(trainningSet, model.numStates());

		if (addFeatureNow) {
			addFeatures();
		}
	}

	/**
	 * Rewrites the labels of every training sequence into model states when
	 * the model's state count differs from its label count; returns false
	 * when no mapping is needed.
	 */
	public boolean stateMappings(DataIter trainData) throws Exception {
		if (model.numStates() == model.numberOfLabels()) {
			return false;
		}
		for (trainData.startScan(); trainData.hasNext();) {
			DataSequence sequence = trainData.next();
			if (sequence instanceof SegmentDataSequence) {
				model.stateMappings((SegmentDataSequence) sequence);
			} else {
				model.stateMappings(sequence);
			}
		}
		return true;
	}

	/**
	 * Inverse of stateMappings(): converts the states stored in a sequence
	 * back into labels. Returns false when states and labels coincide.
	 */
	public boolean mapStatesToLabels(DataSequence data) {
		if (model.numStates() == model.numberOfLabels()) {
			return false;
		}
		if (data instanceof SegmentDataSequence) {
			model.mapStatesToLabels((SegmentDataSequence) data);
			return true;
		}
		for (int pos = 0; pos < data.length(); pos++) {
			data.set_y(pos, label(data.y(pos)));
		}
		return true;
	}

	/** Returns 1; passed to collectFeatureIdentifiers() as maxMem (currently unused there). */
	public int maxMemory() {
		return 1;
	}

	/** Trains the feature map on the configured training set. */
	public boolean train() throws Exception {
		return train(trainningSet, true);
	}

	public boolean train(DataIter trainData, boolean cachedLabels)
			throws Exception {
		return train(trainData, cachedLabels, true);
	}

	/** True when the model's state count differs from its label count. */
	public boolean labelMappingNeeded() {
		return model.numStates() != model.numberOfLabels();
	}

	/**
	 * Collects feature ids over the training data and freezes the map.
	 * cachedLabels and collectIds are accepted for interface compatibility
	 * but are not consulted here.
	 */
	public boolean train(DataIter trainData, boolean cachedLabels,
			boolean collectIds) throws Exception {
		totalFeatures = featureMap.collectFeatureIdentifiers(trainData,
				maxMemory());
		return true;
	};

	/**
	 * Scans every position of a training sequence and counts each fired
	 * feature that is consistent with the sequence's labels: same label at
	 * the current position and, when the feature carries one, a matching
	 * previous label.
	 *
	 * @param seq training sequence to scan
	 */
	public void addTrainRecord(DataSequence seq) {
		for (int pos = 0; pos < seq.length(); pos++) {
			startScanFeaturesAt(seq, pos);
			while (hasNext()) {
				FeatureImpl fired = (FeatureImpl) next();
				int end = cposEnd;
				int curLabel = fired.y();
				int prevLabel = fired.yprev();
				boolean labelMatches = (data.y(end) == curLabel);
				boolean prevMatches = (prevLabel < 0)
						|| ((end - 1 >= 0) && (prevLabel == data.y(end - 1)));
				if (labelMatches && prevMatches)
					addToFeatureCountTable(fired);
			}
		}
	}

	/** Dumps model, dictionary and feature-count statistics to stdout. */
	public void printStats() {
		PrintStream out = System.out;
		out.println("Num states " + model.numStates());
		out.println("Num edges " + model.numEdges());
		if (dict != null) {
			out.println("Num words in dictionary " + dict.dictionaryLength());
		}
		out.println("Num features " + numFeatures());
	}

	/** Returns the current feature and advances without resolving feature ids. */
	protected FeatureImpl nextNoId() {
		feature.copy(featureToReturn);
		advance(false);
		return feature;
	}

	protected void advance() {
		advance(!featureCollectMode);
	}

	/**
	 * Advances featureToReturn to the next feature that has a usable id and
	 * is valid at the current segment position; sets featureToReturn.id to -1
	 * when the scan is exhausted.
	 * NOTE(review): the returnWithId parameter is never read in this method.
	 */
	protected void advance(boolean returnWithId) {
		while (true) {
			// move to the next feature type that still has features to offer
			for (; ((currentFeatureType == null) || !currentFeatureType
					.hasNext())
					&& featureIter.hasNext();) {
				currentFeatureType = (FeatureTypes) featureIter.next();
			}
			if (!currentFeatureType.hasNext())
				break;
			while (currentFeatureType.hasNext()) {
				featureToReturn.init();
				copyNextFeature(featureToReturn);

				featureToReturn.id = featureMap.getId(featureToReturn);

				if (featureToReturn.id < 0) {
					// unknown feature: skip it, unless inference asked for
					// whole features, in which case flag it with MAX_VALUE
					if (!CRF.isInInferenceState || !addWholeFeature)
						continue;
					else {
						featureToReturn.id = Integer.MAX_VALUE;
					}
				}
				if (featureValid(data, cposStart, cposEnd, featureToReturn,
						model))
					return;

			}
		}
		featureToReturn.id = -1;
	}

	/**
	 * Copies the current feature type's next feature into the given holder.
	 *
	 * @param featureToReturn destination feature object
	 */
	protected void copyNextFeature(FeatureImpl featureToReturn) {
		currentFeatureType.next(featureToReturn);
	}

	/**
	 * Checks that a feature's states are admissible at the given segment
	 * boundaries: interior positions always pass (as do out-of-range state
	 * ids); the first position requires a start state (and, for length-1
	 * sequences, also an end state); the last position requires an end state.
	 *
	 * @param data            sequence being scanned
	 * @param cposStart       segment start position
	 * @param cposEnd         segment end position
	 * @param featureToReturn candidate feature
	 * @param model           model supplying start/end state information
	 * @return true when the feature may fire at this position
	 */
	public static boolean featureValid(DataSequence data, int cposStart,
			int cposEnd, FeatureImpl featureToReturn, Model model) {
		boolean interior = (cposStart > 0) && (cposEnd < data.length() - 1);
		boolean outOfRangeState = (featureToReturn.y() >= model.numStates())
				|| (featureToReturn.yprev() >= model.numStates());
		if (interior || outOfRangeState) {
			return true;
		}
		if ((cposStart == 0)
				&& model.isStartState(featureToReturn.y())
				&& ((data.length() > 1) || model.isEndState(featureToReturn.y()))) {
			return true;
		}
		return (cposEnd == data.length() - 1)
				&& model.isEndState(featureToReturn.y());
	}

	/** Resets the per-sequence scan state and positions on the first feature. */
	protected void initScanFeaturesAt(DataSequence d) {
		data = d;
		currentFeatureType = null;
		featureIter = features.iterator();
		advance();
	}

	/**
	 * Starts scanning features for the segment (prev+1 .. p) of sequence d
	 * (segment/nested variant).
	 */
	public void startScanFeaturesAt(DataSequence d, int prev, int p) {
		cposStart = prev + 1;
		cposEnd = p;
		for (int t = 0; t < features.size(); t++) {
			getFeature(t).startScanFeaturesAt(d, prev, cposEnd);
		}
		initScanFeaturesAt(d);
	}

	/** Starts scanning features at the single position p of sequence d. */
	public void startScanFeaturesAt(DataSequence d, int p) {
		cposStart = p;
		cposEnd = p;
		for (int t = 0; t < features.size(); t++) {
			getFeature(t).startScanFeaturesAt(d, cposEnd);
		}
		initScanFeaturesAt(d);
	}

	/** True while the scan started by startScanFeaturesAt() has more features. */
	public boolean hasNext() {
		return (featureToReturn.id >= 0);
	}

	/** Returns the current feature and advances the scan. */
	public Feature next() {
		feature.copy(featureToReturn);
		advance();
		return feature;
	}

	/** Freezes the feature map if it is still collecting. */
	public void freezeFeatures() throws Exception {
		if (featureCollectMode)
			featureMap.freezeFeatures();
	}

	/** Total number of features after training/reading. */
	public int numFeatures() {
		return totalFeatures;
	}

	public FeatureIdentifier featureIdentifier(int id) {
		return featureMap.getIdentifier(id);
	}

	public String featureName(int featureIndex) {
		return featureMap.getName(featureIndex);
	}

	public int numStates() {
		return model.numStates();
	}

	/** Maps a state number to its label via the model. */
	public int label(int stateNum) {
		return model.label(stateNum);
	}

	protected int numFeatureTypes() {
		return features.size();
	}

	/**
	 * Loads the dictionary and the feature map from the given file.
	 *
	 * @param fileName file previously produced by write()
	 * @throws Exception on I/O or parse errors
	 */
	public void read(String fileName) throws Exception {
		BufferedReader in = new BufferedReader(new FileReader(fileName));
		try {
			if (dict != null)
				dict.read(in, model.numStates());
			totalFeatures = featureMap.read(in);
		} finally {
			// the reader was previously leaked on every path
			in.close();
		}
	}

	/**
	 * Saves the dictionary and the feature map to the given file.
	 *
	 * @param fileName destination path
	 * @throws IOException on I/O errors
	 */
	public void write(String fileName) throws IOException {
		PrintWriter out = new PrintWriter(new FileOutputStream(fileName));
		try {
			if (dict != null)
				dict.write(out);
			featureMap.write(out);
		} finally {
			// previously only closed on the success path
			out.close();
		}
	}

	/** Prints one "name label state weight" line per feature to stdout. */
	public void displayModel(double featureWts[]) throws IOException {
		displayModel(featureWts, System.out);
	}

	/**
	 * Prints, for every feature index, its name, mapped label, state id and
	 * learned weight, one feature per line.
	 */
	public void displayModel(double featureWts[], PrintStream out)
			throws IOException {
		for (int idx = 0; idx < featureWts.length; idx++) {
			FeatureIdentifier ident = featureIdentifier(idx);
			int stateId = ident.stateId;
			out.println(ident.name + " " + model.label(stateId) + " "
					+ stateId + " " + featureWts[idx]);
		}
	}

	/** True when no registered feature type produces data-dependent transition features. */
	public boolean fixedTransitionFeatures() {
		return _fixedTransitions;
	}

	/**
	 * Dumps the feature occurrence counts, one "name&lt;TAB&gt;count" line
	 * each, to the recordTxt file.
	 *
	 * @throws IOException on I/O errors
	 */
	public void printFeatureCountTable() throws IOException {
		BufferedWriter bw = new BufferedWriter(new FileWriter(recordTxt));
		try {
			// iterate entries to avoid a second lookup per key
			for (java.util.Map.Entry<String, Integer> e : featureCountTable
					.entrySet())
				bw.write(e.getKey() + "\t" + e.getValue() + "\n");
		} finally {
			// previously leaked when a write failed
			bw.close();
		}
	}

	/**
	 * For every selected feature, writes its weight, training count,
	 * precision and precision*count to the recordTxt file.
	 *
	 * @param featureWts learned weights indexed by feature id
	 * @throws IOException on I/O errors
	 */
	public void printStringToInt_FeatureCountTable(double featureWts[])
			throws IOException {
		BufferedWriter bw = new BufferedWriter(new FileWriter(recordTxt));
		try {
			FeatureStore fs = new FeatureStore(featureCountTable,YCount);
			for (Object key : featureMap.strToInt.keySet()) {
				String s = key.toString();
				double fw = featureWts[(Integer) featureMap.strToInt.get(key)];
				// look each statistic up once instead of recomputing it
				int featureCount = (int) FeatureStore.tryGetI(fs.featureCount, s);
				double fCp = FeatureStore.tryGetD(fs.featurePrecision, s);
				double fCp_M_count = fCp
						* FeatureStore.tryGetI(fs.featureCount, s);
				bw.write(U.removeId(s) + "\t" + fw + "\t" + featureCount + "\t"
						+ fCp + "\t" + fCp_M_count + "\n");
			}
		} finally {
			// previously leaked when a lookup or write threw
			bw.close();
		}
	}

	/**
	 * Writes, per label, all selected features sorted by F-measure, together
	 * with their precision, recall, F-measure and count, to the
	 * featureCluster file.
	 * NOTE(review): the per-feature statistics lines read from the
	 * featureStore field while the F-measure grouping uses a freshly built
	 * FeatureStore (fs) — confirm both are in sync.
	 *
	 * @throws IOException on I/O errors
	 */
	public void printClusteredFeatures() throws IOException {
		BufferedWriter bw = new BufferedWriter(new FileWriter(featureCluster));
		try {
			bw.write("featureCount: "+featureMap.strToInt.size()+"\n");
			FeatureStore fs = new FeatureStore(featureCountTable,YCount);

			// one bucket of (feature, F-measure) pairs per label
			Vector<ArrayList<KeyValuePair>> v = new Vector<ArrayList<KeyValuePair>>(
					NerDataSequence.labelNum);
			for (int i = 0; i < NerDataSequence.labelNum; i++)
				v.add(new ArrayList<KeyValuePair>());

			for (Object key : featureMap.strToInt.keySet()) {
				String f = key.toString();
				v.get(FeatureStore.getFeatureY(f)).add(
						new KeyValuePair(f, FeatureStore.tryGetD(
								fs.featureFMeasure, f)));
			}

			for (int i = 0; i < NerDataSequence.labelNum; i++) {
				ArrayList<KeyValuePair> al = v.get(i);
				U.sort(al);

				bw.write("__________________________________" + "\n");
				bw.write("label:" + i + "\n\n");
				for (KeyValuePair kvp : al)
					bw.write(U.removeId(kvp.key) + "\t"
							+ featureStore.featurePrecision.get(kvp.key) + "\t"
							+ featureStore.featureRecall.get(kvp.key) + "\t"
							+ kvp.value + "\t"
							+ featureStore.featureCount.get(kvp.key) + "\n");
				bw.write("\n");
			}
		} finally {
			// previously leaked when anything above threw
			bw.close();
		}
	}

	/**
	 * Increments the training-data occurrence count of the given feature,
	 * keyed by its identifier string.
	 */
	void addToFeatureCountTable(FeatureImpl f) {
		String featureIdentifier = f.identifier().toString();
		// single lookup instead of the previous contains-then-get pattern
		Integer count = featureCountTable.get(featureIdentifier);
		featureCountTable.put(featureIdentifier,
				(count == null) ? 1 : count + 1);
	}

	/**
	 * Computes, for every predicate, the minimum (most negative, capped at 0)
	 * weight over all of its selected features, storing the result in
	 * minPredicateWeight.
	 *
	 * @param lambda learned feature weights indexed by feature id
	 */
	public void loadMinPredicateWeight(double[] lambda) {
		minPredicateWeight = new HashMap<String, Double>();

		// start every known predicate at 0.0 so only negative weights lower it
		for (String predicate : featureStore.predicateCount.keySet()) {
			minPredicateWeight.put(predicate, 0.0);
		}

		for (String feature : featureStore.featureCount.keySet()) {
			FeatureIdentifier fi = new FeatureIdentifier(feature);
			// features dropped by feature selection have no id; skip them
			if (null == (Integer) featureMap.strToInt.get(fi))
				continue;
			double val = lambda[(Integer) featureMap.strToInt.get(fi)];
			String predicate = U.getFeaturePredicate(feature);
			// NOTE(review): assumes every feature's predicate appears in
			// predicateCount; otherwise get() returns null and this NPEs
			if (val < minPredicateWeight.get(predicate))
				minPredicateWeight.put(predicate, val);
		}

	}

	// registers a su state-feature type and returns the (negative) width of its id range
	int addSuFeatures(StateFeatures suFeatures) {
		addFeature(suFeatures);
		return -suFeatures.maxFeatureNum;
	}
	
	// registers a su transition-feature type and returns the (negative) width of its id range
	int addTransitionFeatures(TransitionFeatures f) {
		addFeature(f);
		return -f.maxFeatureNum;
	}
	
	// counts how often each label occurs in the training set (consumed by FeatureStore in freezeFeatures())
	void loadYCount(){
		YCount=new Hashtable<String, Integer>();
		for (trainningSet.startScan(); trainningSet.hasNext();) {
			DataSequence seq = trainningSet.next();
			for (int l = 0; l < seq.length(); l++) {
				U.add1(YCount, seq.y(l)+"");
			}
		}
	}
};
