import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.Vector;

import cs224n.util.Counter;
import cs224n.util.PriorityQueue;
import libsvm.*;

/**
 * Newsgroup classifier backed by libsvm (C-SVC with an RBF kernel by default).
 *
 * Training is two passes over the data: the first pass accumulates per-class
 * word/document statistics and (optionally) runs chi-square feature selection;
 * the second pass converts every message into a sparse libsvm vector and
 * trains the SVM. The trained model is saved to {@link #MODEL_FILE}, and the
 * training vectors are also dumped in libsvm ("data.libsvm") and Weka
 * sparse-ARFF ("data.arff") formats for external tools.
 */
public class SVMClassifier extends AbstractClassifier {
	/** Strategy used to prune the vocabulary before building feature vectors. */
	public enum FeatureSelectionMethod {
		NONE, CHI
	}
	
	/** File the trained libsvm model is written to after train(). */
	private static final String MODEL_FILE = "svm.model";
	/** Number of top chi-square words kept per class when CHI selection is on. */
	private static int CHI_FEATURE_THRESHOLD = 10;
	
	private svm_parameter param;			// libsvm training parameters
	private svm_problem prob;			// libsvm problem: labels + feature vectors
	private svm_model model;			// model produced by svm.svm_train
	private Map<String,Integer> featureIndex;	// word -> 0-based feature index
	
	// for feature selection only
	protected Counter<Integer> classDocCounter;	// class -> number of training documents
	protected Counter<Integer> classWordCounter;	// class -> sum over docs of distinct-word counts
	protected Counter<String> termDocCounter;	// word -> number of docs containing it (all classes)
	protected Counter<String> termWordCounter;	// word -> total occurrences (all classes)
	protected Map<Integer, Counter<String>> classWordCounterMap;	// class -> per-word occurrence counts
	protected Map<Integer, Counter<String>> classDocCounterMap;	// class -> per-word document counts
	private Set<String> featureWords;		// training vocabulary (reduced after selection)
	protected Set<String> validFeatureWords;	// vocabulary surviving feature selection
	
	protected int totalDocCount = 0;	// training documents seen
	protected int totalWordCount = 0;	// word tokens seen
	protected int featureTotalCount = 0;	// size of the final feature vocabulary
	protected int classTotalCount = 0;	// number of distinct classes seen
	
	protected FeatureSelectionMethod fsMethod;

	/**
	 * @param dataFile data location, passed through to AbstractClassifier
	 * @param fsMethod feature selection strategy applied during train()
	 */
	public SVMClassifier(String dataFile, FeatureSelectionMethod fsMethod) throws Exception {
		super(dataFile);
		
		this.fsMethod = fsMethod;
	}
	
	/**
	 * Resets all per-fold state and configures default libsvm parameters.
	 * Must be called before train() for each cross-validation fold.
	 */
	public void initialize(int foldIndex, int foldCount) throws Exception {
		super.initialize(foldIndex, foldCount);
		
		param = new svm_parameter();
		
		// default values
		param.svm_type = svm_parameter.C_SVC;
		param.kernel_type = svm_parameter.RBF;
		param.degree = 3;
		param.gamma = 0;	// 0 = "fill in from the feature count" later, in train()
		param.coef0 = 0;
		param.nu = 0.5;
		param.cache_size = 100;
		param.C = 100;
		param.eps = 1e-3;
		param.p = 0.1;
		param.shrinking = 1;
		param.probability = 0;
		param.nr_weight = 0;
		param.weight_label = new int[0];
		param.weight = new double[0];
		
		// a null print function silences libsvm's per-iteration chatter
		svm_print_interface print_func = null;
		svm.svm_set_print_string_function(print_func);
		
		classDocCounter = new Counter<Integer>();
		classWordCounter = new Counter<Integer>();
		termDocCounter = new Counter<String>();
		termWordCounter = new Counter<String>();
		classWordCounterMap = new HashMap<Integer, Counter<String>>();
		classDocCounterMap = new HashMap<Integer, Counter<String>>();
		featureWords = new HashSet<String>();
		validFeatureWords = new HashSet<String>();
		
		totalDocCount = 0;
		totalWordCount = 0;
		
		featureIndex = new HashMap<String, Integer>();
	}

	/** Builds a term-frequency counter over a message's subject and body. */
	private Counter<String> countTerms(MessageFeatures mf) {
		Counter<String> counter = new Counter<String>();
		counter.incrementAll(mf.subject);
		counter.incrementAll(mf.body);
		return counter;
	}

	/**
	 * First pass over the training data: accumulates the per-class and global
	 * word/document statistics that feature selection needs.
	 */
	private void collectStatistics() throws Exception {
		MessageFeatures mf;
		while ((mf = miw.getNextTrainingMessage()) != null) {
			int c = mf.newsgroupNumber;
			
			classDocCounter.incrementCount(c);
			
			if (!classWordCounterMap.containsKey(c))
				classWordCounterMap.put(c, new Counter<String>());
			if (!classDocCounterMap.containsKey(c))
				classDocCounterMap.put(c, new Counter<String>());
			
			Counter<String> wordCounter = classWordCounterMap.get(c);
			Counter<String> docCounter = classDocCounterMap.get(c);
			
			Counter<String> counter = countTerms(mf);
			
			int wordCount = 0;	// distinct words in this message
			for (String word : counter.keySet()) {
				double count = counter.getCount(word);
				wordCounter.incrementCount(word, count);
				termWordCounter.incrementCount(word, count);
				wordCount++;
				totalWordCount += (int) count;
			}
			
			// each distinct word counts once per document
			for (String word : counter.keySet()) {
				docCounter.incrementCount(word);
				termDocCounter.incrementCount(word);
			}
			
			featureWords.addAll(counter.keySet());
			classWordCounter.incrementCount(c, wordCount);
			
			totalDocCount++;
		}
		
		classTotalCount = classDocCounter.size();
	}

	/**
	 * Chi-square feature selection over a document-level (binomial) event
	 * model. Keeps the CHI_FEATURE_THRESHOLD highest-scoring words of each
	 * class and replaces featureWords with that reduced vocabulary; with
	 * FeatureSelectionMethod.NONE the full vocabulary is kept. Always sets
	 * featureTotalCount.
	 */
	private void selectFeatures() {
		if (fsMethod == FeatureSelectionMethod.CHI) {
			for (int c = 0; c < classTotalCount; c++) {
				double classDocCount = classDocCounter.getCount(c);
				Counter<String> docCounter = classDocCounterMap.get(c);
				
				PriorityQueue<String> chiValues = new PriorityQueue<String>();
				
				for (String word : docCounter.keySet()) {
					// 2x2 contingency table over documents:
					// A: class-c docs containing word, B: other docs containing word,
					// C: class-c docs without word,    D: everything else.
					double N = totalDocCount;
					double A = docCounter.getCount(word);
					double B = termDocCounter.getCount(word) - A;
					double C = classDocCount - A;
					double D = N - A - B - C;
					
					double chi = (N * (A*D - C*B) * (A*D - C*B)) / ((A+C)*(B+D)*(A+B)*(C+D));
					
					chiValues.add(word, chi);
				}
				
				// keep the top-scoring words; echo the first few for inspection
				for (int i = 0; i < CHI_FEATURE_THRESHOLD && chiValues.hasNext(); i++) {
					String word = chiValues.next();
					if (i < 20) {
						if (i == 0)
							System.out.print(word);
						else
							System.out.print("\t" + word);
					}
					
					validFeatureWords.add(word);
				}
				
				System.out.print("\n");
			}
			
			featureWords = validFeatureWords;
		}
		
		featureTotalCount = featureWords.size();
	}

	/**
	 * Converts a term-frequency counter into a sparse libsvm feature vector.
	 * Words not in featureIndex are dropped.
	 *
	 * The entries are sorted by ascending feature index: libsvm's kernel
	 * routines walk the two node arrays in lockstep and silently compute
	 * wrong dot products on unsorted input. (The previous code emitted the
	 * nodes in HashMap iteration order, which is effectively random.)
	 */
	private svm_node[] buildFeatureVector(Counter<String> counter) {
		// TreeMap keeps the feature indices in ascending order
		TreeMap<Integer, Double> sorted = new TreeMap<Integer, Double>();
		for (String word : counter.keySet()) {
			Integer index = featureIndex.get(word);
			if (index != null)
				sorted.put(index, counter.getCount(word));
		}
		
		svm_node[] x = new svm_node[sorted.size()];
		int i = 0;
		for (Map.Entry<Integer, Double> entry : sorted.entrySet()) {
			x[i] = new svm_node();
			x[i].index = entry.getKey();
			x[i].value = entry.getValue();
			i++;
		}
		return x;
	}

	/** Writes up to 50 training vectors per class run to "data.libsvm". */
	private void dumpLibsvmData() throws Exception {
		System.err.println("## SVM INPUT PRINT START ##");
		PrintWriter out = new PrintWriter(new FileWriter("data.libsvm"));
		int count = 0;
		int curC = -1;
		for (int i = 0; i < prob.l; i++) {
			int c = (int) prob.y[i];
			if (c != curC) {	// messages arrive grouped by class; reset the per-class cap
				curC = c;
				count = 0;
			}
			if (count++ >= 50) continue;
			
			out.format("%d ", (int) prob.y[i]);
			for (int j = 0; j < prob.x[i].length; j++)
				out.format("%d:%f ", prob.x[i][j].index, prob.x[i][j].value);
			out.write("\n");
		}
		out.close();
		System.err.println("## SVM INPUT PRINT END ##");
	}

	/** Writes the full training set in Weka sparse-ARFF format to "data.arff". */
	private void dumpArffData() throws Exception {
		System.err.println("## ARFF PRINT START ##");
		PrintWriter out = new PrintWriter(new FileWriter("data.arff"));
		
		out.print("@RELATION newsgroup\n\n");
		for (String word : featureWords)
			out.print("@ATTRIBUTE " + word + "\t NUMERIC\n");
		out.print("@ATTRIBUTE class\t{");
		for (int i = 0; i < classTotalCount; i++)
			if (i == 0)
				out.format("%d", i);
			else
				out.format(",%d", i);
		out.print("}\n\n");
		
		out.print("@DATA\n");
		
		// small holder so the priority queue can carry (index, value) pairs
		final class Pair {
			public int index;
			public double value;
			
			public Pair(int index, double value) {
				this.index = index;
				this.value = value;
			}
		}
		
		PriorityQueue<Pair> pq = new PriorityQueue<Pair>();
		for (int i = 0; i < prob.l; i++) {
			int c = (int) prob.y[i];
			
			// negative priority drains the queue in ascending index order,
			// as the sparse-ARFF format requires
			for (int j = 0; j < prob.x[i].length; j++)
				pq.add(new Pair(prob.x[i][j].index, prob.x[i][j].value), -1.0 * prob.x[i][j].index);
			
			out.print("{");
			while (pq.hasNext()) {
				Pair p = pq.next();
				out.format("%d %f,", p.index, p.value);
			}
			// the class attribute sits one past the last feature index
			out.format("%d %d", featureTotalCount, c);
			out.write("}\n");
		}
		out.close();
		System.err.println("## ARFF PRINT END ##");
	}

	/**
	 * Trains the SVM for the current fold. See the class comment for the
	 * overall flow; side effects are the saved model file and the two
	 * training-data dumps.
	 */
	@Override
	public void train() throws Exception {
		System.err.println("## SVM TRAINING (" + foldIndex + "/" + foldCount + ") START ##");
		
		collectStatistics();
		selectFeatures();
		
		// assign a stable 0-based index to each surviving feature word
		int fi = 0;
		for (String word : featureWords)
			featureIndex.put(word, fi++);
		
		System.err.println("totalDocCount : " + totalDocCount);
		System.err.println("totalWordCount : " + totalWordCount);
		System.err.println("featureTotalCount : " + featureTotalCount);
		System.err.println("classTotalCount : " + classTotalCount);
		
		miw.initialize();	// rewind the message stream for the second pass
		
		Vector<Double> vy = new Vector<Double>();
		Vector<svm_node[]> vx = new Vector<svm_node[]>();
		
		MessageFeatures mf;
		while ((mf = miw.getNextTrainingMessage()) != null) {
			vy.addElement((double) mf.newsgroupNumber);
			vx.addElement(buildFeatureVector(countTerms(mf)));
		}
		
		int maxIndex = featureWords.size() - 1;	// largest 0-based feature index
		
		prob = new svm_problem();
		prob.l = vy.size();
		prob.x = new svm_node[prob.l][];
		for (int i = 0; i < prob.l; i++)
			prob.x[i] = vx.elementAt(i);
		
		prob.y = new double[prob.l];
		for (int i = 0; i < prob.l; i++)
			prob.y[i] = vy.elementAt(i);
		
		// NOTE(review): with 0-based indices this yields 1/(num_features-1),
		// not exactly 1/num_features; kept as-is to preserve existing results.
		if (param.gamma == 0 && maxIndex > 0)
			param.gamma = 1.0 / maxIndex;
		
		// sanity checks for precomputed kernels (unused with the RBF default)
		if (param.kernel_type == svm_parameter.PRECOMPUTED)
			for (int i = 0; i < prob.l; i++) {
				if (prob.x[i][0].index != 0) {
					System.err.print("Wrong kernel matrix: first column must be 0:sample_serial_number\n");
					System.exit(1);
				}
				if ((int) prob.x[i][0].value <= 0 || (int) prob.x[i][0].value > maxIndex) {
					System.err.print("Wrong input format: sample_serial_number out of range\n");
					System.exit(1);
				}
			}
		
		// dump training data for Weka / external inspection
		dumpLibsvmData();
		dumpArffData();
		
		model = svm.svm_train(prob, param);
		svm.svm_save_model(MODEL_FILE, model);

		System.err.println("## SVM TRAINING (" + foldIndex + "/" + foldCount + ") END ##");
	}

	/**
	 * Classifies every test message with the trained model.
	 *
	 * @return accuracy: fraction of messages whose predicted newsgroup
	 *         matches the label
	 */
	@Override
	public double test() throws Exception {
		System.err.println("## SVM TESTING (" + foldIndex + "/" + foldCount + ") START ##");
		
		int totalCount = 0;
		int correctCount = 0;
		
		MessageFeatures mf;
		while ((mf = miw.getNextTestingMessage()) != null) {
			svm_node[] x = buildFeatureVector(countTerms(mf));
			
			int predict = (int) svm.svm_predict(model, x);
			if (predict == mf.newsgroupNumber)
				correctCount++;
			
			totalCount++;
		}
		
		double accuracy = (double) correctCount / totalCount;
		System.err.format("Accuracy : %2.2f (%d / %d)\n", accuracy * 100, correctCount, totalCount);
		
		System.err.println("## SVM TESTING (" + foldIndex + "/" + foldCount + ") END ##");
		
		return accuracy;
	}

	/** Prints svm_train-style usage (retained from the libsvm CLI) and exits. */
	private void exit_with_help()
	{
		System.err.print(
			"Usage: svm_train [options] training_set_file [model_file]\n"
			+"options:\n"
			+"-s svm_type : set type of SVM (default 0)\n"
			+"	0 -- C-SVC\n"
			+"	1 -- nu-SVC\n"
			+"	2 -- one-class SVM\n"
			+"	3 -- epsilon-SVR\n"
			+"	4 -- nu-SVR\n"
			+"-t kernel_type : set type of kernel function (default 2)\n"
			+"	0 -- linear: u'*v\n"
			+"	1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
			+"	2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
			+"	3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
			+"	4 -- precomputed kernel (kernel values in training_set_file)\n"
			+"-d degree : set degree in kernel function (default 3)\n"
			+"-g gamma : set gamma in kernel function (default 1/num_features)\n"
			+"-r coef0 : set coef0 in kernel function (default 0)\n"
			+"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
			+"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
			+"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
			+"-m cachesize : set cache memory size in MB (default 100)\n"
			+"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
			+"-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
			+"-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
			+"-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
			+"-v n : n-fold cross validation mode\n"
			+"-q : quiet mode (no outputs)\n"
		);
		System.exit(1);
	}
}
