import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import cs224n.util.Counter;
import cs224n.util.PriorityQueue;

/**
 * Naive Bayes classifier over the 20 Newsgroups corpus, supporting several
 * event models (Bernoulli/"binomial", multinomial, complement NB, weighted
 * complement NB, and TF-IDF-transformed weighted complement NB — "TWCNB")
 * and optional feature selection (chi-square, KL, dKL).
 *
 * Training streams messages from the inherited {@code miw} iterator and
 * accumulates per-class document/word counts; testing scores each held-out
 * message against every class and reports accuracy.
 */
public class NBClassifier extends AbstractClassifier {
	
	/** The 20 Newsgroups class labels, indexed by newsgroup number. */
	final static String[] classLabels = {
		"alt.atheism",
		"comp.graphics",
		"comp.os.ms-windows.misc",
		"comp.sys.ibm.pc.hardware",  // fixed: was misspelled "jcomp.sys.ibm.pc.hardware"
		"comp.sys.mac.hardware",
		"comp.windows.x",
		"misc.forsale",
		"rec.autos",
		"rec.motorcycles",
		"rec.sport.baseball",
		"rec.sport.hockey",
		"sci.crypt",
		"sci.electronics",
		"sci.med",
		"sci.space",
		"soc.religion.christian",
		"talk.politics.guns",
		"talk.politics.mideast",
		"talk.politics.misc",
		"talk.religion.misc"
	};
	
	/** How to prune the vocabulary before classification. */
	public enum FeatureSelectionMethod {
		NONE, CHI, KL, dKL
	}
	
	/** Which Naive Bayes event model to train/test with. */
	public enum ClassifierType {
		BINOMIAL, MULTINOMIAL, COMPLEMENT, WEIGHT_COMPLEMENT, TRANSFORMED_WEIGHT_COMPLEMENT
	}
	
	// Number of top-chi-square words kept per class, and total words kept by KL/dKL.
	private static int CHI_CLASS_FEATURE_THRESHOLD = 300;
	private static int KL_FEATURE_THRESHOLD = 5403;

	// Multiplier applied to subject-line counts relative to body counts.
	protected static int UPWEIGHTING_RATIO = 1;
	
	protected Counter<Integer> classDocCounter;             // class -> number of training documents
	protected Counter<Integer> classWordCounter;            // class -> total word tokens (or TF-IDF mass for TWCNB)
	protected Counter<String> termDocCounter;               // word -> number of documents containing it
	protected Counter<String> termWordCounter;              // word -> total token count across all classes
	protected Map<Integer, Counter<String>> classWordCounterMap;  // class -> per-word token counts
	protected Map<Integer, Counter<String>> classDocCounterMap;   // class -> per-word document counts
	protected Set<String> featureWords;                     // full training vocabulary
	protected Set<String> validFeatureWords;                // vocabulary surviving feature selection
	
    protected int totalDocCount = 0;
    protected double totalWordCount = 0;
    protected int featureTotalCount = 0;
    protected int classTotalCount = 0;
    
    // Per-class log-probability mass precomputed once after training
    // (only used by the Bernoulli model: sum of log(1 - P(w|c)) over all features).
    double[] preProbs;
    
    protected FeatureSelectionMethod fsMethod;
    protected ClassifierType type;

	/**
	 * @param dataFile  path handed to {@link AbstractClassifier}
	 * @param type      event model to use
	 * @param fsMethod  feature-selection strategy (NONE disables pruning)
	 */
	public NBClassifier(String dataFile, ClassifierType type, FeatureSelectionMethod fsMethod) throws Exception {
		super(dataFile);
		
		this.fsMethod = fsMethod;
		this.type = type;
	}
	
	/** Resets all counters for a fresh cross-validation fold. */
	public void initialize(int foldIndex, int foldCount) throws Exception {
		super.initialize(foldIndex, foldCount);
		
		classDocCounter = new Counter<Integer>();
	  	classWordCounter = new Counter<Integer>();
	  	termDocCounter = new Counter<String>();
	  	termWordCounter = new Counter<String>();
	  	classWordCounterMap = new HashMap<Integer, Counter<String>>();
	  	classDocCounterMap = new HashMap<Integer, Counter<String>>();
	  	featureWords = new HashSet<String>();
	  	validFeatureWords = new HashSet<String>();
	  	
	  	totalDocCount = 0;
	  	totalWordCount = 0;
	}
	
	
	/**
	 * Consumes every training message, accumulates counts for the configured
	 * event model, optionally runs feature selection, and precomputes the
	 * Bernoulli per-class log mass.
	 *
	 * @throws Exception if the configured classifier/feature-selection
	 *                   combination is unsupported
	 */
	@Override
	public void train() throws Exception {
		System.err.println("## NB TRAINING (" + (foldIndex+1) + "/" + foldCount + ") START ##");
		
		MessageFeatures mf = null;
		
		// TWCNB needs document frequencies from a preliminary pass before
		// the TF-IDF transform in the main pass can be computed.
		if(type == ClassifierType.TRANSFORMED_WEIGHT_COMPLEMENT){
			extraTrainForTWCNB();
		}
	
	  	while((mf = miw.getNextTrainingMessage()) != null){
	  		if(type == ClassifierType.TRANSFORMED_WEIGHT_COMPLEMENT){
	  			trainCountForTWCNB(mf);
	  		}else{
	  			trainCount(mf);
	  		}
	  	}
	  	
	  	/*
	  	// message counts for each class
	  	for(Integer c : classCounter.keySet())
	  		System.err.println("Class " + c + " : " + classCounter.getCount(c));
	  	*/
	  	
	  	featureTotalCount = featureWords.size();
	  	classTotalCount = classDocCounter.size();
	  	
	  	// chi-square feature selection: keep the top-scoring words per class
		Map<Integer, PriorityQueue<String>> featureValueMap = new HashMap<Integer, PriorityQueue<String>>();
	  	if(fsMethod == FeatureSelectionMethod.CHI)
	  	{
	  		for(int c=0; c<classTotalCount; c++)
	  		{
		  		double classDocCount = classDocCounter.getCount(c);
		  		double classWordCount = classWordCounter.getCount(c);
		  		
		  		Counter<String> docCounter = classDocCounterMap.get(c);
		  		Counter<String> wordCounter = classWordCounterMap.get(c);
		  		
		  		PriorityQueue<String> chiValues = new PriorityQueue<String>();
		  		
		  		for(String word : docCounter.keySet())
		  		{
		  			// Standard 2x2 contingency table:
		  			// A = word & class, B = word & !class, C = !word & class, D = neither.
		  			double A=0, B=0, C=0, D=0, N=0;
		  			
		  			if(type == ClassifierType.BINOMIAL)
		  			{
		  				// Bernoulli model counts documents.
		  				N = totalDocCount;
			  			A = docCounter.getCount(word);
			  			B = termDocCounter.getCount(word) - A;
			  			C = classDocCount - A;
			  			D = N - A - B - C;
		  			}
		  			else if(type == ClassifierType.MULTINOMIAL || type == ClassifierType.COMPLEMENT 
		  					|| type == ClassifierType.WEIGHT_COMPLEMENT || type == ClassifierType.TRANSFORMED_WEIGHT_COMPLEMENT)
		  			{
		  				// Multinomial-family models count word tokens.
		  				N = totalWordCount;
			  			A = wordCounter.getCount(word);
			  			B = termWordCounter.getCount(word) - A;
			  			C = classWordCount - A;
			  			D = N - A - B - C;
		  			}
		  			else
		  				throw new Exception("not supported classifier");
		  			
		  			double chi = (N * (A*D - C*B) * (A*D - C*B)) / ((A+C)*(B+D)*(A+B)*(C+D));
		  			
		  			chiValues.add(word, chi);
		  		}
		  		
		  		//System.out.print(classLabels[c] + " : ");
		  		
		  		// Keep the top words for this class; echo the first 20 for inspection.
		  		for(int i=0; i< CHI_CLASS_FEATURE_THRESHOLD && chiValues.hasNext(); i++)
		  		{
		  			String word = chiValues.next();
		  			if(i<20)
		  			{
			  			if(i==0)
				  			System.out.print(word);
			  			else
				  			System.out.print("\t" + word);
		  			}
		  			
		  			validFeatureWords.add(word);
		  		}
		  		
		  		System.out.print("\n");
		  		
		  		featureValueMap.put(c, chiValues);
	  		}
	  		
	  		
	  	}else if(fsMethod == FeatureSelectionMethod.KL || fsMethod == FeatureSelectionMethod.dKL){
	  		// KL-based selection is only defined here for the multinomial model.
	  		if(type != ClassifierType.MULTINOMIAL)
	  			throw new Exception("not supported classifier");
	  			  			  		
	  		PriorityQueue<String> klValues = new PriorityQueue<String>();

	  		for(String word : termDocCounter.keySet())
	  		{
		  		double K_t = 0;   // entropy-like term over the whole corpus
		  		double KL_t = 0;  // expected within-class divergence contribution
		  		double q_t = 0;   // overall document-frequency estimate of the word
		  		double p_t = 0;   // class-prior-weighted smoothed word probability
	  			
	  			for(int c=0; c<classTotalCount; c++)
		  		{
			  		double classDocCount = classDocCounter.getCount(c);
			  		double classWordCount = classWordCounter.getCount(c);
			  		
			  		Counter<String> docCounter = classDocCounterMap.get(c);
			  		Counter<String> wordCounter = classWordCounterMap.get(c);
			  		
			  		if(docCounter.getCount(word)!=0)
			  			KL_t += -1 * (classDocCount / totalDocCount) * ( (wordCounter.getCount(word) + 1) / (classWordCount + featureTotalCount) )
			  						* Math.log(docCounter.getCount(word)/classDocCount);
			  		
			  		q_t += docCounter.getCount(word)/totalDocCount;
			  		
			  		p_t += (classDocCount / totalDocCount) * ( (wordCounter.getCount(word) + 1) / (classWordCount + featureTotalCount) );
			  		
		  		} 
	  				if(fsMethod == FeatureSelectionMethod.KL)
	  					K_t = -1 * termWordCounter.getCount(word) * Math.log(q_t);
	  				else
	  					K_t = -1 * p_t * Math.log(q_t); 
	  			
	  			System.err.println(K_t +"," + KL_t + "," +(K_t - KL_t));	
	  				
	  			klValues.add(word, K_t - KL_t);

	  		}
	  		
	  		for(int i=0 ; i< KL_FEATURE_THRESHOLD; i++)
	  			validFeatureWords.add(klValues.next());
	  	}
	  	
	  	/*
	  	// Synonym expansion
	  	{
	  		for(int c=0; c<classTotalCount; c++)
	  		{
		  		Counter<String> wordCounter = classWordCounterMap.get(c);
		  		Counter<String> newCounter = new Counter<String>();
		  		for(String word : wordCounter.keySet()){
		  			double count = wordCounter.getCount(word);
		  			for(String syn : WNDictionary.getInstance().getListOfSynonyms(word))
		  			{
		  				newCounter.incrementCount(syn, count);
		  				classWordCounter.incrementCount(c, count);
		  				featureWords.add(syn);
		  			}
		  		}
		  		
		  		wordCounter.incrementAll(newCounter);
	  		}
	  		
	  		featureTotalCount = featureWords.size();
	  	}
	  	*/
	  	
	  	// After selection, the pruned set becomes the working vocabulary.
	  	if(fsMethod != FeatureSelectionMethod.NONE)
	  	{
	  		featureWords = validFeatureWords;
	  		featureTotalCount = validFeatureWords.size();
	  	}
	  	
	  	System.err.println("totalDocCount : " + totalDocCount);
	  	System.err.println("totalWordCount : " + totalWordCount);
	  	System.err.println("featureTotalCount : " + featureTotalCount);
	  	System.err.println("classTotalCount : " + classTotalCount);
	  	
	  	// precompute the probability
	  	preProbs = new double[classTotalCount];
	  	for(int c=0; c<classTotalCount; c++)
	  		preProbs[c] = 0;
	  	
	  	// Bernoulli model only: precompute sum over all features of
	  	// log(1 - P(w|c)) with add-one smoothing, so test() can add back the
	  	// "present" term for words actually seen in a message.
	  	if(type == ClassifierType.BINOMIAL)
	  	{
		  	for(int c=0; c<classTotalCount; c++)
		  	{
		  		double prob = 0.0;
		  		double classDocCount = classDocCounter.getCount(c);
		  		
		  		Counter<String> docCounter = classDocCounterMap.get(c);
		  		
		  		for(String word : featureWords)
		  		{
		  			double docCount = docCounter.getCount(word);
		  			prob += Math.log(
	  					1 - (docCount + 1) / (classDocCount + 2)
		  			);
		  		}
		  		
		  		preProbs[c] = prob;
		  	}
	  	}
		
		System.err.println("## NB TRAINING (" + (foldIndex+1) + "/" + foldCount + ") END ##");
	}
	
	/**
	 * Preliminary TWCNB pass: collects document frequencies (and the total
	 * document count) needed for the IDF term before the real training pass.
	 */
	private void extraTrainForTWCNB() {  	
		MessageFeatures mf = null;
		while((mf = miw.getNextPreTrainingMessage()) != null){
			//int c = mf.newsgroupNumber;
			Set<String> words = new HashSet<String>();
			words.addAll(mf.subject.keySet());
			words.addAll(mf.body.keySet());
			for(String word : words){	
	  			termDocCounter.incrementCount(word);
	  		}
			totalDocCount++;
	  	}		
	}
	
	/**
	 * Accumulates raw counts for one training message (all models except
	 * TWCNB). Subject words are up-weighted by {@code UPWEIGHTING_RATIO}.
	 */
	private void trainCount(MessageFeatures mf) throws Exception{
		int c = mf.newsgroupNumber;
  		
  		classDocCounter.incrementCount(c);
  		
  		if(!classWordCounterMap.containsKey(c))
  			classWordCounterMap.put(c, new Counter<String>());
  		
  		if(!classDocCounterMap.containsKey(c))
  			classDocCounterMap.put(c, new Counter<String>());
  	
			Counter<String> wordCounter = classWordCounterMap.get(c);
			Counter<String> docCounter = classDocCounterMap.get(c);
			
			int wordCount = 0;
			Set<String> words = new HashSet<String>();
			List<Counter<String>> counters = new ArrayList<Counter<String>>();
			
			counters.add(mf.subject);
			counters.add(mf.body);
			
			// i == 0 is the subject counter; its class counts are up-weighted.
			int i=0; 
			for(Counter<String> counter : counters)
			{
	  		for(String word : counter.keySet()){
	  			double count = counter.getCount(word);
	  			if(i == 0)
		  			wordCounter.incrementCount(word, UPWEIGHTING_RATIO * count);
	  			else
		  			wordCounter.incrementCount(word, count);
	  			termWordCounter.incrementCount(word, count);
	  			words.add(word);
	  			wordCount += (int) count;
	  			totalWordCount += (int) count;
	  		}
	  		i++;
			}
  		
  		for(String word : words)
  		{
  			docCounter.incrementCount(word);
  			termDocCounter.incrementCount(word);
  		}
  		
  		featureWords.addAll(words);
  		classWordCounter.incrementCount(c, wordCount);
  		
  		//System.err.println("SUBJECT : " + mf.subject);
  		
  		totalDocCount++;
  		
  		if(totalDocCount % 1000 == 0)
  			System.err.println("Processing training messages " + totalDocCount);
	}
	
	
	/**
	 * Accumulates TF-IDF-transformed, length-normalized counts for one
	 * training message (TWCNB only). Requires {@link #extraTrainForTWCNB}
	 * to have populated {@code termDocCounter} / {@code totalDocCount}.
	 */
	private void trainCountForTWCNB(MessageFeatures mf) throws Exception{
		int c = mf.newsgroupNumber;
  		
		if(!classDocCounterMap.containsKey(c))
  			classDocCounterMap.put(c, new Counter<String>());
  
  		classDocCounter.incrementCount(c);
  		
  		if(!classWordCounterMap.containsKey(c))
  			classWordCounterMap.put(c, new Counter<String>());
  		
			Counter<String> wordCounter = classWordCounterMap.get(c);
			Counter<String> docCounter = classDocCounterMap.get(c);

			double wordCount = 0;
			Set<String> words = new HashSet<String>();
			words.addAll(mf.subject.keySet());
			words.addAll(mf.body.keySet());
			
			Counter<String> tmpCounter = new Counter<String>();

			double sqSum = 0.0;
			for(String word : words){
	  			double count = UPWEIGHTING_RATIO*mf.subject.getCount(word) + mf.body.getCount(word);
	  			
	  			count = Math.log(count + 1.0);  // TF
	  			// IDF — guard against words absent from the pre-training pass,
	  			// which would otherwise produce log(totalDocCount/0) = +Infinity.
	  			double docFreq = termDocCounter.getCount(word);
	  			if(docFreq > 0)
	  				count *= Math.log(totalDocCount / docFreq);
	  			else
	  				count = 0;
				tmpCounter.setCount(word, count);
				sqSum += count*count;
			}
			
			double dLength = Math.sqrt(sqSum); 
			// Guard against an all-zero weight vector (e.g. an empty message):
			// dividing by 0 below would turn every count into NaN.
			if(dLength == 0)
				dLength = 1;

			for(String word : words){
				double count = tmpCounter.getCount(word) / dLength;
		  		wordCounter.incrementCount(word,count);
		  		termWordCounter.incrementCount(word, count);
		  		wordCount += count;
		  		totalWordCount += count;		  		
		  		docCounter.incrementCount(word);
			}
	  		featureWords.addAll(words);
			classWordCounter.incrementCount(c, wordCount);
	}

	/**
	 * Scores every test message against all classes under the configured
	 * event model, printing per-message guesses (multinomial) or probability
	 * tables (Bernoulli) for the first 20 messages of each class.
	 *
	 * @return overall classification accuracy in [0, 1]
	 * @throws Exception if the configured classifier type is unsupported
	 */
	@Override
	public double test() throws Exception {
		System.err.println("## NB TESTING (" + (foldIndex+1) + "/" + foldCount + ") START ##");
		
		int totalCount = 0;
	  	int correctCount = 0;
	  	
	  	int curClass = -1;
	  	int curClassCount = 0;
	  	int totalSampleCount = 0;
	  	int correctSampleCount = 0;
	  	
		MessageFeatures mf = null;
		
		// Caches the per-class complement-weight normalizer so it is computed
		// only once per class across all test messages.
		HashMap<Integer,Double> weightSumMap = new HashMap<Integer, Double>();
		
	  	while((mf = miw.getNextTestingMessage()) != null)
	  	{
	  		PriorityQueue<String> pq = new PriorityQueue<String>();
	  		double[] probs = new double[classTotalCount];
	  		
	  		for(int c=0; c<classTotalCount; c++)
	  		{
		  		double prob = preProbs[c];
		  		double classDocCount = classDocCounter.getCount(c);
		  		double classWordCount = classWordCounter.getCount(c);
		  		
		  		Counter<String> wordCounter = classWordCounterMap.get(c);
		  		Counter<String> docCounter = classDocCounterMap.get(c);
		  		
			  	// log prior P(c)
			  	prob += Math.log((double) classDocCounter.getCount(c) / totalDocCount);
			  	
	  			List<Counter<String>> counters = new ArrayList<Counter<String>>();
	  			Set<String> processedWords = new HashSet<String>();
	  			
	  			counters.add(mf.subject);
	  			counters.add(mf.body);
	  			
	  			// preprocess: normalizer for the weight-complement variants
	  			double totalTempWeight = 0;
	  			
	  			if(type == ClassifierType.WEIGHT_COMPLEMENT || type == ClassifierType.TRANSFORMED_WEIGHT_COMPLEMENT ){
		  			if(weightSumMap.get(c) == null){
	  					for(String word : featureWords)
				  		{	
				  			double wordCount = wordCounter.getCount(word);
	  						totalTempWeight += Math.abs( Math.log( 	
	  												(termWordCounter.getCount(word) - wordCount + 1) / 
	  												(totalWordCount - classWordCount + featureTotalCount) ));
				  		}
	  					weightSumMap.put(c, totalTempWeight);
	  					//System.err.println("totalTempWeight:"+totalTempWeight);
		  			}else{
		  				totalTempWeight = weightSumMap.get(c).doubleValue();
		  			}
		  			// Weight-complement variants ignore the prior/precomputed mass.
		  			prob = 0;
	  			}
	  			
	  			// i == 0 is the subject counter; its counts are up-weighted.
	  			int i=0; 

	  			for(Counter<String> counter : counters)
	  			{
			  		for(String word : counter.keySet())
			  		{
			  			if(fsMethod != FeatureSelectionMethod.NONE 
			  					&& !validFeatureWords.contains(word)) continue;
			  			
			  			double count = counter.getCount(word);
			  			double wordCount = wordCounter.getCount(word);
			  			double docCount = docCounter.getCount(word);
			  			
			  			if(i==0) count *= UPWEIGHTING_RATIO;
			  			
			  			if(type == ClassifierType.BINOMIAL)
			  			{
			  				// Each word contributes once: replace the precomputed
			  				// "absent" term with the "present" term.
			  				if(!processedWords.contains(word) && featureWords.contains(word))
			  				{
					  			prob += (Math.log(
					  					(docCount + 1) / (classDocCount + 2)
					  			) - Math.log(
					  					1 - (docCount + 1) / (classDocCount + 2)
					  			));
					  			
					  			processedWords.add(word);
			  				}
			  			}
			  			else if(type == ClassifierType.MULTINOMIAL)
				  			prob += count * Math.log(
				  					(wordCount + 1) / (classWordCount + featureTotalCount)
				  			);
			  			else if(type == ClassifierType.COMPLEMENT)		  			
			  				prob -= count * Math.log(
				  					(termWordCounter.getCount(word) - wordCount + 1) / (totalWordCount - classWordCount + featureTotalCount)
				  			);
			  			else if(type == ClassifierType.WEIGHT_COMPLEMENT || type == ClassifierType.TRANSFORMED_WEIGHT_COMPLEMENT)
			  				prob -= count * Math.log(
				  					(termWordCounter.getCount(word) - wordCount + 1) / (totalWordCount - classWordCount + featureTotalCount)
				  					) / totalTempWeight;
			  			else
			  				throw new Exception("not supported classifier");
			  		}
			  		i++;
	  			}
		  		
		  		pq.add(Integer.toString(c), prob);
		  		probs[c] = prob;
	  		}
	  		
	  		if(curClass != mf.newsgroupNumber)
	  		{
	  			if(type == ClassifierType.MULTINOMIAL && curClass != -1)
	  				System.out.print("\n");
	  			curClass = mf.newsgroupNumber;
	  			curClassCount = 0;
	  		}
	  		
	  		String guessClass = pq.next();
	  		int guess = Integer.parseInt(guessClass);  // parse once, reuse below
	  		//System.err.println("Guess/Correct : " + guessClass + "/" + mf.newsgroupNumber);
	  		
	  		// Detailed output and "sample" accuracy only cover the first 20
	  		// messages of each class.
	  		if(curClassCount < 20)
	  		{
	  			if(type == ClassifierType.BINOMIAL)
			  		NaiveBayesClassifier.outputProbability(probs);
	  			else if(type == ClassifierType.MULTINOMIAL)
	  				System.out.format("%d\t", guess);
	  			
		  		if(guess == mf.newsgroupNumber)
		  			correctSampleCount++;
		  		
	  			totalSampleCount++;
	  		}
	  		
	  		if(guess == mf.newsgroupNumber)
	  			correctCount++;
	  	
	  		totalCount++;
	  		curClassCount++;
	  	}
	  	
		if(type == ClassifierType.MULTINOMIAL)
			System.out.print("\n");
	  	
	  	double accuracy = (double) correctCount / totalCount;
	  	double sampleAccuracy = (double) correctSampleCount / totalSampleCount;
	  	
	  	System.err.format("Accuracy : %2.2f (%d / %d)\n", accuracy*100, correctCount, totalCount);
	  	System.err.format("Accuracy (sample) : %2.2f (%d / %d)\n", sampleAccuracy*100, correctSampleCount, totalSampleCount);
		
		System.err.println("## NB TESTING (" + (foldIndex+1) + "/" + foldCount + ") END ##");
		
		return accuracy;
	}

}
