package per.hnvcam.searchext.services.impl;

import java.io.IOException;
import java.util.List;

import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.html.HtmlParser;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.xml.sax.SAXException;

import per.hnvcam.common.OrderedSet;
import per.hnvcam.common.impl.OrderedSetImpl;
import per.hnvcam.hedge.algebra.Hedge2AlgebraDefinition;
import per.hnvcam.hedge.algebra.Hedge2Quantifier;
import per.hnvcam.net.http.HTTPConnection;
import per.hnvcam.searchext.Constants;
import per.hnvcam.searchext.data.dao.ConfigDAO;
import per.hnvcam.searchext.data.dao.WebReferenceDAO;
import per.hnvcam.searchext.data.dao.WordCountDAO;
import per.hnvcam.searchext.data.dao.WordDAO;
import per.hnvcam.searchext.data.model.*;
import per.hnvcam.searchext.parser.WebParser;
import per.hnvcam.searchext.parser.impl.WebParserImpl;
import per.hnvcam.searchext.services.SpecialQuery;
import per.hnvcam.searchext.services.TrainingService;
import per.hnvcam.searchext.text.CommonWords;
import per.hnvcam.searchext.text.SentenceExtractor;
import per.hnvcam.searchext.text.SentenceHandler;
import per.hnvcam.searchext.text.WordCounter;
import per.hnvcam.searchext.text.impl.DefaultSentenceExtractorImpl;
import per.hnvcam.searchext.text.impl.DefaultSentenceHandlerImpl;
import per.hnvcam.searchext.text.impl.WordCounterImpl;
import per.hnvcam.searchext.text.token.TokenHandler;
import per.hnvcam.searchext.text.token.Tokenizer;
import per.hnvcam.searchext.text.token.impl.DefaultTokenHandlerImpl;
import per.hnvcam.searchext.text.token.impl.DefaultTokenizerImpl;

/**
 * Trains the fuzzy word-count model from web pages: downloads and parses page
 * content (Tika HTML parsing), tokenizes it into counted words, bulk-persists
 * the counts, and assigns each persisted word count a fuzzy linguistic amount
 * taken from a hedge-algebra domain.
 * <p>
 * Originally created 4/24/11 by violon.
 */
@Service("TrainingService")
public class TrainingServiceImpl extends CallbackSupportServiceImpl implements TrainingService, Runnable, InitializingBean {

   private WebParser webParser;

   private Tokenizer tokenizer;

   private SentenceExtractor sentenceExtractor;

   private SentenceHandler sentenceHandler;

   private TokenHandler tokenHandler;

   private WordCounter wordCounter;

   @Autowired
   private HtmlParser htmlParser;

   @Autowired
   private HTTPConnection httpConnection;

   @Autowired
   private ParseContext parseContext;

   @Autowired
   private Metadata metadata;

   @Autowired
   @Qualifier("WordCountDAO")
   private WordCountDAO wordCountDAO;

   @Autowired
   @Qualifier("CommonWords")
   private CommonWords commonWords;

   @Autowired
   @Qualifier("SpecialQuery")
   private SpecialQuery specialQuery;

   @Autowired
   private WordDAO wordDAO;

   @Autowired
   private Hedge2AlgebraDefinition hedgeAlgebraDefinition;

   @Autowired
   private Hedge2Quantifier fuzzyQuantifier;

   @Autowired
   private WebReferenceDAO webReferenceDAO;

   public TrainingServiceImpl() {

   }

   @Override
   public void reset() {
      wordCounter = null;
      tokenHandler = null;
      tokenizer = null;
      sentenceHandler = null;
      sentenceExtractor = null;
      webParser = null;
   }

   public WordCounter getWordCounter() {
      if (wordCounter == null) {
         wordCounter = new WordCounterImpl();
      }
      return wordCounter;
   }

   @Override
   public void determineWordAmount() {
      List<Word> wordList = wordDAO.listWords();
      OrderedSet<String> domain = hedgeAlgebraDefinition.getDomainOfLength(2);
      OrderedSet<Double> ranges = fuzzyQuantifier.getFuzzinessInterval(2);
      for (Word word : wordList) {
         if (word.getMaxCount() < domain.size()) continue;
         OrderedSet<Double> fuzzinessIntervals = multiply(ranges, word.getMaxCount());
         List<WordCount> wordCounts = wordCountDAO.findByWord(word);
         for (WordCount wc : wordCounts) {
            for (int i = 0; i < domain.size(); i++) {
               if (wc.getCount() > fuzzinessIntervals.getAt(i) && ( wc.getCount() <= fuzzinessIntervals.getAt(i+1) ||
                  i == (domain.size() - 1) ) ) {
                  wc.setFuzzyAmount(domain.getAt(i));
                  wordCountDAO.saveOrUpdate(wc);
               }
            }
         }
      }
      executeCallback();
   }

   private OrderedSet<Double> multiply(OrderedSet<Double> item, double by) {
      OrderedSet<Double> result = new OrderedSetImpl<Double>();
      for (Double d : item) {
         result.add(d * by);
      }
      return result;
   }

   public TokenHandler getTokenHandler() {
      if (tokenHandler == null) {
         SystemConfiguration systemConfiguration = specialQuery.getSystemConfiguration();
         tokenHandler = new DefaultTokenHandlerImpl();
         tokenHandler.setTokenCountMaximum(systemConfiguration.getWordLengthMaximum());
         tokenHandler.setTokenCountMininum(systemConfiguration.getWordLengthMinimum());
         tokenHandler.setWordCounter(getWordCounter());
         tokenHandler.setCommonWords(commonWords);
      }
      return tokenHandler;
   }

   public Tokenizer getTokenizer() {
      if (tokenizer == null) {
         tokenizer = new DefaultTokenizerImpl();
         tokenizer.setHandler(getTokenHandler());
      }
      return tokenizer;
   }

   public SentenceHandler getSentenceHandler() {
      if (sentenceHandler == null) {
         sentenceHandler = new DefaultSentenceHandlerImpl();
         sentenceHandler.setTokenizer(getTokenizer());
      }
      return sentenceHandler;
   }

   public SentenceExtractor getSentenceExtractor() {
      if (sentenceExtractor == null) {
         sentenceExtractor = new DefaultSentenceExtractorImpl();
         sentenceExtractor.setHandler(getSentenceHandler());
      }
      return sentenceExtractor;
   }

   public WebParser getWebParser() {
      if (webParser == null) {
         webParser = new WebParserImpl(htmlParser, httpConnection, parseContext, metadata);
         webParser.setContentHandler(getSentenceExtractor());

      }
      return webParser;
   }

   @Override
   public void trainFromURL(String url) throws IOException, SAXException, TikaException {
      getWebParser().parse(url);
   }

   @Override
   public void trainFromWebReference(WebReference webReference) throws IOException, SAXException, TikaException {
      getWebParser().parse(webReference.getUrl());
      wordCountDAO.bulkSave(wordCounter, webReference, specialQuery.getSystemConfiguration().getWordCountMinimum());
      webReference.setProcessed(true);
      webReferenceDAO.updatePage(webReference);
      reset();
   }

   public void setHtmlParser(HtmlParser htmlParser) {
      this.htmlParser = htmlParser;
   }

   public void setHttpConnection(HTTPConnection httpConnection) {
      this.httpConnection = httpConnection;
   }

   public void setParseContext(ParseContext parseContext) {
      this.parseContext = parseContext;
   }

   public void setMetadata(Metadata metadata) {
      this.metadata = metadata;
   }

   public void setCommonWords(CommonWords commonWords) {
      this.commonWords = commonWords;
   }

   public void setSpecialQuery(SpecialQuery specialQuery) {
      this.specialQuery = specialQuery;
   }

   @Override
   public void afterPropertiesSet() throws Exception {
      specialQuery.addCallbackAction(this.getClass().getName(), this);
   }

   @Override
   public void run() {
      reset();
   }
}
