import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import cs224n.util.Counter;
import cs224n.util.CounterMap;

/**
 * Represents the base class for Naive Bayes classifiers.  All classifiers should extend this class.
 * @author alecmgo@gmail.com
 *
 */
public abstract class BaseClassifier {
  //Parameters for k-fold cross validation: number of folds
  final int k = 10;
  //When true, skip cross validation and run a single train/test split instead
  final boolean SKIP_K_FOLDING = true;

  //Parameter for simple testing: documents per newsgroup held out as the test set
  final int NUM_TEST_DOCS_PER_CLASS = 20;

  //Every message read from the MessageIterator; fixed at construction time
  final List<MessageFeatures> allMessages;
  //Messages held out for evaluation; rebuilt on every training run
  List<MessageFeatures> testList = new ArrayList<MessageFeatures>();

  //term -> newsgroup -> raw count (exact counting semantics defined by the subclass's adjustCount)
  CounterMap<String, Integer> termCount = new CounterMap<String, Integer>();
  //term -> newsgroup -> P(t|c), filled in by calculateConditionalProbabilities
  CounterMap<String, Integer> conditionalProbabilities = new CounterMap<String, Integer>();

  //newsgroup -> number of training documents counted for it
  Counter<Integer> newsgroupCount = new Counter<Integer>();
  //newsgroup -> P(c)
  Counter<Integer> newsgroupPrior = new Counter<Integer>();

  /**
   * Do not use.  A classifier is meaningless without messages, so this constructor always throws.
   * @throws RuntimeException always
   */
  public BaseClassifier() {
    this.allMessages = null;
    throw new RuntimeException("BaseClassifier should not be called with empty field!");
  }

  /**
   * Builds the classifier from every message produced by the given iterator.
   * @param mi source of messages; fully consumed (and closed) by {@link #digest}
   */
  public BaseClassifier(MessageIterator mi) {
    this.allMessages = digest(mi);
  }

  /**
   * Fills {@code conditionalProbabilities} from {@code termCount}.  Abstract because the
   * Binomial and Multinomial classifiers estimate P(t|c) differently.
   */
  abstract void calculateConditionalProbabilities();

  /**
   * Scores a message against one newsgroup's model.
   * @param message the message to score
   * @param newsgroup the candidate class
   * @return the (unnormalized) score; higher means a better fit
   */
  abstract double getScore(MessageFeatures message, int newsgroup);

  /**
   * Main entry point into this class.  This does 2 simple things:
   * <ul>
   * <li>trains the classifier</li>
   * <li>runs a test</li>
   * </ul>
   * When {@code SKIP_K_FOLDING} is false, the train/test cycle is instead repeated {@code k}
   * times (k-fold cross validation) and the average accuracy is reported.
   * @param outputType one of the {@code OutputType} constants controlling what runTest prints
   */
  public void process(int outputType) {
    if(SKIP_K_FOLDING) {
      train(allMessages, k, 0);
      runTest(outputType);
    } else {
      //Perform K-folds testing
      double score = 0;
      for(int runCount = 0; runCount < k; runCount++) {
        //Reset all the counters so each fold trains from scratch
        testList = new ArrayList<MessageFeatures>();
        termCount = new CounterMap<String, Integer>();
        conditionalProbabilities = new CounterMap<String, Integer>();
        newsgroupCount = new Counter<Integer>();
        newsgroupPrior = new Counter<Integer>();

        train(allMessages, k, runCount);
        score += runTest(0);
        System.out.println("Test list size was: " + testList.size());
        System.out.println("Term count size was: " + termCount.size());
        System.out.println("Newsgroup Count size was: " + newsgroupCount.size());
        System.out.println("K-folding cross validation finished run " + runCount);
        System.out.println("Summed score: " + score + "; Average: " + (score / (runCount + 1)));
      }
      System.out.println("Average score for k-folding: " + score / k);
    }
  }

  /**
   * Reads every message out of the iterator into a list.  An {@code EOFException} is treated
   * as normal end-of-stream; any other exception is logged and reading stops.  The iterator
   * is always closed, whichever way the loop ends.
   * @param mi the message source; closed before this method returns
   * @return all messages read before the stream ended
   */
  List<MessageFeatures> digest(MessageIterator mi) {
    long startTime = System.currentTimeMillis();
    System.err.println("Started reading messages");

    List<MessageFeatures> allMessages = new ArrayList<MessageFeatures>();
    try {
      MessageFeatures mf;
      while((mf = mi.getNextMessage()) != null) {
        allMessages.add(mf);
      }
    } catch(EOFException eofException) {
      //Expected: the iterator signals end-of-stream this way.  Nothing to do.
    } catch(Exception e) {
      //Unexpected failure mid-read; keep whatever was read so far.
      e.printStackTrace();
    } finally {
      //Close on every path, not just on EOF (the original leaked on normal termination).
      try {
        mi.close();
      } catch(IOException ioException) {
        //Report the close failure itself (the original printed the wrong exception here).
        ioException.printStackTrace();
      }
    }

    long endTime = System.currentTimeMillis();
    System.err.println("Finished reading messages in " + (endTime - startTime) + " ms...");
    return allMessages;
  }

  /**
   * Train the classifier.  This roughly follows the algorithm listed on page 240.
   * @param messages the full corpus; the held-out fold is skipped during counting
   * @param k number of folds
   * @param runCount which fold (0..k-1) is held out on this run
   */
  void train(List<MessageFeatures> messages, int k, int runCount) {
    long startTime = System.currentTimeMillis();
    System.err.println("Start training...");

    processCounts(messages, k, runCount);
    calculateNewsgroupPriors();
    System.err.println("# Terms in Corpus: " + termCount.size());
    calculateConditionalProbabilities();

    long endTime = System.currentTimeMillis();
    System.err.println("Finished training in " + (endTime - startTime) + " ms...");
  }

  /**
   * Performs basic counts.  This method also takes care of adding items to the test set:
   * in simple mode the first NUM_TEST_DOCS_PER_CLASS documents of each class go to the test
   * set (and are still counted for training); in k-fold mode every k-th document is held out
   * entirely.
   * @param messages the full corpus
   * @param k number of folds
   * @param runCount which fold is held out (ignored in simple mode)
   */
  void processCounts(List<MessageFeatures> messages, int k, int runCount) {

    for(int count = 0; count < messages.size(); count++) {
      MessageFeatures message = messages.get(count);

      //Add item to test set if necessary
      if(SKIP_K_FOLDING) {
        if(newsgroupCount.getCount(message.newsgroupNumber) < NUM_TEST_DOCS_PER_CLASS) {
          testList.add(message);
        }
      } else {
        if((count % k) == runCount) {
          testList.add(message);
          continue; //held-out fold: do not count toward training
        }
      }

      //Calculate numerator for P(c)
      newsgroupCount.incrementCount(message.newsgroupNumber);

      //Calculate numerator for P(t|c)
      for(String term : message.body.keySet()) {
        adjustCount(message, term);
      }
    }
  }

  /**
   * Adjusts counts.  This is abstract because the Binomial and Multinomial classifiers
   * count things differently.
   * @param message the message the term appears in
   * @param term the term to count
   */
  abstract void adjustCount(MessageFeatures message, String term);

  /**
   * Calculates the class (newsgroup) priors P(c) (see formula 13.5):
   * prior(c) = count(c) / total documents counted.
   */
  void calculateNewsgroupPriors() {
    double totalNewsgroupMessages = newsgroupCount.totalCount();
    for(Integer newsgroup : newsgroupCount.keySet()) {
      newsgroupPrior.setCount(newsgroup, newsgroupCount.getCount(newsgroup) / totalNewsgroupMessages);
    }
  }

  /**
   * Runs a test over {@code testList}: scores every message against every newsgroup, picks
   * the argmax, and compares it to the true label.  This is run many times by the k-fold
   * cross-validation code.
   * @param outputType one of the {@code OutputType} constants controlling per-message output
   * @return accuracy in [0, 1] (NaN if the test list is empty)
   */
  double runTest(int outputType) {
    long startTime = System.currentTimeMillis();
    System.err.println("Starting test...");

    int numCorrect = 0;
    int count = 0;
    for(MessageFeatures mf : testList) {
      double[] scores = new double[newsgroupCount.size()];
      for(int newsgroup = 0; newsgroup < newsgroupCount.size(); newsgroup++) {
        scores[newsgroup] = getScore(mf, newsgroup);
      }

      int bestClass = Util.getIndexOfMax(scores);
      if(bestClass == mf.newsgroupNumber) {
        numCorrect++;
      }

      if(outputType == OutputType.DEBUG) {
        System.out.print(mf.newsgroupNumber + "\t" + bestClass + "\t");
        NaiveBayesClassifier.outputProbability(scores);
      } else if(outputType == OutputType.PROBABILITIES_ONLY) {
        NaiveBayesClassifier.outputProbability(scores);
      } else if(outputType == OutputType.CLASSES_ONLY) {
        if(count % NUM_TEST_DOCS_PER_CLASS == 0) {
          System.out.println("");
        }
        System.out.print(bestClass + "\t");
      }
      count++;
    }

    //Compute once and reuse in both the log line and the return value.
    double testScore = (double) numCorrect / (double) testList.size();
    System.err.println("Accuracy was " + testScore + " (" + numCorrect + "/" + testList.size() + " correct)");

    long endTime = System.currentTimeMillis();
    System.err.println("Finished test. " + (endTime - startTime) + " ms elapsed.");

    return testScore;
  }
}