package org.hackystat.sdsa.analyzer.control;

import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.logging.Logger;

import jess.Batch;
import jess.Fact;
import jess.JessException;
import jess.QueryResult;
import jess.RU;
import jess.Rete;
import jess.Value;
import jess.ValueVector;

import org.hackystat.sdsa.analyzer.SdsaAnalyzerException;
import org.hackystat.sdsa.analyzer.control.compressor.CompressorContainer;
import org.hackystat.sdsa.analyzer.control.compressor.EditCompressor;
import org.hackystat.sdsa.analyzer.control.compressor.RefactoringCompressor;
import org.hackystat.sdsa.analyzer.control.processor.ActivityBuilder;
import org.hackystat.sdsa.analyzer.control.tokenizer.BufferTransTokenizer;
import org.hackystat.sdsa.analyzer.control.tokenizer.Tokenizer;
import org.hackystat.sdsa.analyzer.control.tokenizer.UnitTestPassTokenizer;
import org.hackystat.sdsa.analyzer.model.Episode;
import org.hackystat.sdsa.analyzer.model.activity.Activity;
import org.hackystat.sdsa.service.resource.devstream.jaxb.DevStream;
import org.hackystat.sdsa.service.resource.devstream.jaxb.MacroBehavior;
import org.hackystat.sdsa.service.resource.devstream.jaxb.MicroBehavior;
import org.hackystat.sensorbase.resource.sensordata.jaxb.SensorData;

/**
 * Implements a DevStreamProcessor that manages the lifecycle from raw sensor data entry 
 * to DevStreamResource.
 * 
 * @author hongbing
 *
 */
public class DevStreamBuilder {
  /** Classpath directory containing the TDD rule files loaded via Jess Batch. */
  private static final String TDD_RULE_DIR = "/org/hackystat/sdsa/analyzer/control/rules/tdd/";

  /** Episode tokenizer name (lower-cased key into tokenizerMap). */
  private String tokenizer;
  /** Recognizing ruleset name (lower-cased key into the behavior maps). */
  private String ruleset;
  /** Logger. */
  private Logger logger;
  
  /** Activity builder that converts raw sensor data into activities. */
  private ActivityBuilder activityBuilder;
  /** Container of compressors that collapse redundant activities. */
  private CompressorContainer compressorContainer;
  
  /** Maps tokenizer names to tokenizer implementations. */
  private Map<String, Tokenizer> tokenizerMap;
  
  /** Maps ruleset names to the file defining the micro-behavior rules. */
  private Map<String, String> microBehaviorMap;
  /** Maps ruleset names to the file defining the macro-behavior rules. */
  private Map<String, String> macroBehaviorMap;
  
  /** An interpreter for micro-behavior of episodes. */
  private TDDEpisodeInterpreter interpreter;  
  
  /**
   * Constructs a devstream processor.
   * 
   * @param tokenizer Episode tokenizer name; must be one of the registered tokenizers
   *        (case-insensitive).
   * @param ruleset Recognizing ruleset name; must be one of the registered rulesets
   *        (case-insensitive).
   * @param logger Logger.
   * @throws SdsaAnalyzerException If the tokenizer or ruleset is unknown.
   */
  public DevStreamBuilder(String tokenizer, String ruleset, Logger logger) 
       throws SdsaAnalyzerException {
    this.activityBuilder = new ActivityBuilder(logger);
    
    // Compressors collapse redundant activities converted from raw sensor data.
    this.compressorContainer = new CompressorContainer();
    this.compressorContainer.addCompressor(new EditCompressor());
    this.compressorContainer.addCompressor(new RefactoringCompressor());
    
    // Tokenizers
    this.tokenizerMap = new TreeMap<String, Tokenizer>();
    this.tokenizerMap.put("testpass", new UnitTestPassTokenizer());
    this.tokenizerMap.put("buffertrans", new BufferTransTokenizer());
    
    // Ruleset
    this.microBehaviorMap = new TreeMap<String, String>();
    // Currently, only the TDD rules are defined.
    this.microBehaviorMap.put("tdd", "TDD.clp");
    
    this.macroBehaviorMap = new TreeMap<String, String>();
    this.macroBehaviorMap.put("tdd", "TDDConformanceHeuristicAlgorithm.clp");
    
    this.interpreter = new TDDEpisodeInterpreter();
    
    logger.info("Tokenizer: " + tokenizer);
    this.tokenizer = tokenizer.toLowerCase();
    
    logger.info("Ruleset: " + ruleset);
    this.ruleset = ruleset.toLowerCase();
    
    // Fail fast on an unknown tokenizer here, instead of an opaque
    // NullPointerException later in build().
    if (!this.tokenizerMap.containsKey(this.tokenizer)) {
      throw new SdsaAnalyzerException("No tokenizer defined for '" + this.tokenizer + '\'');
    }
    
    // Check the availability of ruleset.
    if (!this.microBehaviorMap.containsKey(this.ruleset)) {
      throw new SdsaAnalyzerException("No appropriate rules defined for '" + this.ruleset + '\'');
    }
    
    this.logger = logger;
  }

  /**
   * Adds a SensorData to the development stream.
   * 
   * @param sensorData SensorData.
   * @throws SdsaAnalyzerException If error occurs.
   */
  public void addDevEvent(SensorData sensorData) throws SdsaAnalyzerException {
    this.activityBuilder.process(sensorData);
  }

  /**
   * Builds a DevStream resource: compresses the accumulated activities, tokenizes them
   * into episodes, then classifies each episode's micro-behavior and the overall
   * macro-behavior using the configured ruleset.
   * 
   * @return DevStream resource.
   * @throws SdsaAnalyzerException If error occurs.
   */
  public DevStream build() throws SdsaAnalyzerException {
    this.logger.info("Retrieving activity list.");
    // Compress the redundant activities converted from raw sensor data.
    List<Activity> activityList = this.activityBuilder.getActivityList();
    this.compressorContainer.compress(activityList);
    
    this.logger.info("Tokenizing activities in DevStream.");
    
    // Tokenizes episodes out of dev stream activity list.
    // (Named episodeTokenizer to avoid shadowing the field of the same name.)
    Tokenizer episodeTokenizer = this.tokenizerMap.get(this.tokenizer);
    List<Episode> episodeList = episodeTokenizer.execute(activityList);
    
    this.logger.info("Evaluating the episode using ruleset:" + this.ruleset);
    
    // Loads the rule file.
    Rete engine = loadMicroBehaviorRules(this.microBehaviorMap.get(this.ruleset));
    
    this.logger.info("Building DevStream resource.");
    DevStream devStream = new DevStream();

    // Evaluate the micro-behavior of each episode individually.
    for (Episode episode : episodeList) {
      try {
        MicroBehavior microBehavior = this.recognizeMicroBehavior(engine, episode);
        episode.setMicroBehavior(microBehavior);
      }
      catch (JessException e) {
        throw new SdsaAnalyzerException("Error in recognizing microbehavior of an episode: " 
            + episode, e);
      }
    }
    
    // Evaluate the macro-behavior across the whole episode list.
    this.deriveMacroBehavior(engine, episodeList, this.macroBehaviorMap.get(this.ruleset));    

    for (Episode episode : episodeList) {
      devStream.getEpisodeTO().add(episode.toTransferObject());
    }
    
    return devStream;
  }
  
  /**
   * Derives macro behavior of episodes: loads the macro-behavior rules, asserts each
   * episode's already-recognized micro-behavior as a fact, fires the rules, then queries
   * the TDD-conformance classification for every episode.
   * 
   * @param engine Rule engine.
   * @param episodeList List of episodes; each must already carry a micro-behavior.
   * @param rulefile Rule file.
   * @throws SdsaAnalyzerException If error in deriving macro behaviors.
   */
  private void deriveMacroBehavior(Rete engine, List<Episode> episodeList,
      String rulefile) throws SdsaAnalyzerException {
    loadRuleFile(TDD_RULE_DIR + rulefile, engine);
    
    for (int i = 0; i < episodeList.size(); i++) {
      try {
        assertMicroBehavior(engine, episodeList.get(i), i);
      }
      catch (JessException e) {
        throw new SdsaAnalyzerException("Error in asserting behavior for " + (i + 1) + 
            "th episode." , e);
      }
    }
    
    // Fires up rules.
    try {
      engine.run();
    }
    catch (JessException e) {
      throw new SdsaAnalyzerException("Possible engine problem", e);
    }
    
    // Queries TDD episode classification.
    try {
      QueryResult result;
      for (int i = 0; i < episodeList.size(); i++) {
        result = engine.runQueryStar("episode-tdd-conformance-query-by-index", 
            (new ValueVector()).add(new Value(i, RU.INTEGER)));
        if (result.next()) {
          MacroBehavior macroBehavior = new MacroBehavior();
          boolean isTDD = Boolean.parseBoolean(result.getString("isTDD"));
          if (isTDD) {
            macroBehavior.setBehavior("TDD");
          }
          else {
            macroBehavior.setBehavior("NOT TDD");
          }

          macroBehavior.setExplanation(result.getString("exp"));
          
          Episode episode = episodeList.get(i);
          episode.setMacroBehavior(macroBehavior);
        }
        else {
          // Every episode is expected to match exactly one classification result.
          throw new SdsaAnalyzerException("MacroBehavior of the " + (i + 1) + 
              "th episode cannot be queried");
        }
      }
    }
    catch (JessException e) {
      throw new SdsaAnalyzerException("Error while querying episode result", e);
    }
  }

  /**
   * Asserts the micro behavior of the episode into rule engine. The behavior string is
   * expected in "category:type" form, as produced by interpretMicroBehavior.
   * 
   * @param engine Rule engine.
   * @param episode Episode.
   * @param index The index of this episode.
   * @return Asserted fact. 
   * @throws JessException If Jess error occurs. 
   */
  private Fact assertMicroBehavior(Rete engine, Episode episode, int index) 
      throws JessException {
    String behavior = episode.getMicroBehavior().getBehavior();
    String[] behaviorData = behavior.split(":");
    String name = behaviorData[0];
    String type = behaviorData[1];
    
    Fact f = new Fact("EpisodeConformance", engine);
    f.setSlotValue("index", new Value(index, RU.INTEGER));
    f.setSlotValue("category", new Value(name, RU.STRING));
    f.setSlotValue("subtype", new Value(type, RU.STRING));
    
    Fact assertedFact = engine.assertFact(f);
    return assertedFact;
  }
  
  /**
   * Loads one Jess rule file from the classpath into the engine, wrapping any Jess
   * failure in an SdsaAnalyzerException with the cause chained.
   * 
   * @param path Classpath location of the rule file.
   * @param engine Rule engine to load into.
   * @throws SdsaAnalyzerException If the rule file cannot be loaded.
   */
  private void loadRuleFile(String path, Rete engine) throws SdsaAnalyzerException {
    try {
      Batch.batch(path, engine);
    }
    catch (JessException e) {
      // The cause is chained into the thrown exception, so no stack trace dump is needed.
      throw new SdsaAnalyzerException("Error in loading rule file: " + path, e);
    }
  }
  
  /**
   * Initializes a rule engine and loads the Jess templates plus the micro-behavior
   * classification rules.
   * 
   * @param rulefile Classifier rule file.
   * @return Rete rule engine.
   * @throws SdsaAnalyzerException If error in loading rules.
   */
  private Rete loadMicroBehaviorRules(String rulefile) throws SdsaAnalyzerException {
    // Defines rule engine.
    Rete engine = new Rete();
    
    // Loads activity template definition.
    loadRuleFile("/org/hackystat/sdsa/analyzer/model/rules/ActivityTemplate.clp", engine);
    // Loads episode category definition.
    loadRuleFile("/org/hackystat/sdsa/analyzer/model/rules/EpisodeTemplate.clp", engine);
    // Loads the micro-behavior classification rules themselves.
    loadRuleFile(TDD_RULE_DIR + rulefile, engine);
   
    return engine;
  }
  
  /**
   * Recognizes episode using rules: resets the engine's working memory, asserts the
   * episode's activities as facts, fires the classification rules, then queries the
   * single classification result.
   * 
   * @param engine Loaded rule engine.
   * @param episode Tokenized episode.
   * @return MicroBehavior with the interpreted classification; defaults to
   *         category "unknown", type "1" when no rule applies.
   * @throws JessException Error in invoking rule engine. 
   * @throws SdsaAnalyzerException If error in interpreting results.
   */
  private MicroBehavior recognizeMicroBehavior(Rete engine, Episode episode) 
      throws JessException, SdsaAnalyzerException {
    // Clears working memory
    engine.reset();
    
    // Asserts actions as fact into working memory; activity indices are 1-based.
    int index = 1;
    for (Activity activity : episode.getActivityList()) {
      activity.assertActivity(index++, engine);
    }
    
    // Fires up classification rules.
    engine.run();
    
    // Defaults used when no classification rule matches this episode.
    String categoryName = "unknown";
    String categoryType = "1";
    String categoryExp = "";
    
    // Queries episode classification result.
    QueryResult result = engine.runQueryStar("episode-classification-query", new ValueVector());
    if (result.next()) {
      categoryName = result.getString("cat");
      categoryType = result.getString("tp");
      categoryExp = result.getString("exp");
    }

    // Interpret rule engine evaluation results.
    MicroBehavior episodeBehavior = this.interpretMicroBehavior(categoryName, 
        categoryType, categoryExp);
    
    return episodeBehavior;
  }
  
  /**
   * Interprets episode classification results using a helper class.
   * 
   * @param category Micro-behavior category.
   * @param type Micro-behavior type.
   * @param extra Comma-separated interpretation parameters; may be empty.
   * @return MicroBehavior with interpreted results. 
   * @throws SdsaAnalyzerException If error occurs.
   */
  private MicroBehavior interpretMicroBehavior(String category, String type, String extra) 
      throws SdsaAnalyzerException {
    String[] params = {};
    if (!"".equals(extra)) {
      params = extra.split(",");  
    }
    
    String explanation = interpreter.interpret(category, type, params);

    MicroBehavior microBehavior = new MicroBehavior();
    // The "category:type" form is parsed back by assertMicroBehavior.
    microBehavior.setBehavior(category + ":" + type);
    microBehavior.setExplanation(explanation);
    
    return microBehavior;
  }
}