/*
 * Copyright (C) 2011 JiangHongTiao
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package sk.lieskove.jianghongtiao.paris.webclient.web;

import net.sourceforge.stripes.action.*;
import net.sourceforge.stripes.integration.spring.SpringBean;
import sk.lieskove.jianghongtiao.common.utils.PropertiesUtils;
import sk.lieskove.jianghongtiao.paris.webclient.model.DocumentManager;
import sk.lieskove.jianghongtiao.paris.webclient.persistence.ClassificationItem;
import java.io.Serializable;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.sourceforge.stripes.validation.Validate;
import sk.lieskove.jianghongtiao.commonpersist.Persist;
import sk.lieskove.jianghongtiao.paris.core.classify.old.ArffThread;
import sk.lieskove.jianghongtiao.paris.core.classify.old.ClassifyManager;
import sk.lieskove.jianghongtiao.paris.core.classify.ThreadClassifier;
import sk.lieskove.jianghongtiao.paris.core.classify.enums.SupportedClassifiers;
import sk.lieskove.jianghongtiao.paris.core.persistence.ClassifierStatistics;
import sk.lieskove.jianghongtiao.paris.core.persistence.EvaluatorTestStatistics;
import sk.lieskove.jianghongtiao.websearch.persistence.ClassificationStorage;
import weka.classifiers.Evaluation;
import weka.core.Instances;

/**
 * Stripes action bean for the batch-classification workflow: it collects the
 * training/test document types, languages, baseline percentage, words-to-keep
 * and classifier selection from the form, and forwards the (currently empty)
 * evaluation results to the results page.
 *
 * <p>NOTE(review): the actual classification pipeline — building train/test
 * {@code Instances} pairs via {@code ArffThread} workers and evaluating each
 * selected classifier with {@code ThreadClassifier} — was commented out in the
 * original source and is therefore disabled; {@link #process()} currently
 * renders an empty result map. The removed commented-out code remains
 * available in version control. See the TODO markers below.</p>
 *
 * @author xjuraj e-mail: jjurco.sk_gmail.com
 */
@UrlBinding(value = "/batch/{$event}")
public class BatchClassificationActionBean extends AbstractActionBean implements Serializable {

    // transient because PropertiesUtils is not part of the serialized form;
    // NOTE(review): the inline initializer does not rerun on deserialization,
    // so this field is null afterwards — confirm it is re-created if needed.
    private transient PropertiesUtils pu = new PropertiesUtils(BatchClassificationActionBean.class);
    // NOTE(review): injected bean is not transient — serializing this action
    // bean requires DocumentManager to be serializable; verify.
    @SpringBean
    private DocumentManager documentManager;
    // Languages selected on the form; only the first entry is used by the
    // (disabled) learning-set logic.
    private List<String> languages;
    private List<String> trainDocTypes;
    private String joinTrainClasses;
    private List<String> testDocTypes;
    private String joinTestClasses;
    // Percentage (1-99) used to normalize class sizes toward a baseline.
    @Validate(minvalue = 1, maxvalue = 99, required = false)
    private Integer baseline = 60;
    // Maximum number of words kept when building the ARFF representation.
    @Validate(minvalue = 1, maxvalue = 9999, required = false)
    private Integer wordsToKeep = 500;
    // Number of train/test repetitions per classifier.
    @Validate(minvalue = 1, maxvalue = 1000, required = false)
    private Integer repeat = 20;
    @Validate(required = false)
    private List<String> classifiers;

    /**
     * Default handler: populates the batch-classification form with the known
     * languages, document classes and supported classifier names, then
     * forwards to the form page.
     *
     * @return forward to {@code /WEB-INF/pages/batchClassify.jsp}
     */
    @DefaultHandler
    public Resolution display() {
        // Parameterized lists instead of the original raw types; language
        // codes and class names are plain strings, so natural-order sorting
        // is well defined.
        List<String> langList = documentManager.getUsedLanguageList();
        List<String> docTypeList = documentManager.getUsedClasses();
        List<String> supportedClassifiers = SupportedClassifiers.asNameList();
        Collections.sort(langList);
        this.getContext().getRequest().setAttribute("langList", langList);
        this.getContext().getRequest().setAttribute("docTypeList", docTypeList);
        this.getContext().getRequest().setAttribute("classifierList", supportedClassifiers);
        return new ForwardResolution("/WEB-INF/pages/batchClassify.jsp");
    }

    /**
     * Retrieve random train/test instance pairs from the database.
     *
     * <p>TODO(review): the implementation is disabled. The intended algorithm
     * (see version control for the removed code) was: for each of
     * {@code repeat} iterations, spawn an {@code ArffThread} building the
     * training set from {@code trainDocTypes} (throwing
     * {@code IllegalArgumentException} if none were specified) and another
     * building the test set from {@code testDocTypes} (falling back to the
     * training types when absent), normalized to {@code baseline} via
     * {@code ClassifyManager}; then join all threads and collect their
     * modified instances into the result map.</p>
     *
     * @return map of train-to-test instance pairs; currently always empty
     */
    private Map<Instances, Instances> retrieveListOfInstances() {
        // The original also allocated two never-used ArffThread lists here;
        // they were dead code and have been removed.
        return new HashMap<Instances, Instances>();
    }

    /**
     * Runs the batch classification and forwards the per-classifier
     * statistics to the results page.
     *
     * <p>TODO(review): the classifier/evaluation loop is disabled, so the
     * {@code results} attribute is always empty. The intended flow was: for
     * each selected classifier name, run a {@code ThreadClassifier} per
     * train/test pair, persist an {@code EvaluatorTestStatistics} per run,
     * and aggregate them into a {@code ClassifierStatistics} key.</p>
     *
     * @return forward to {@code /WEB-INF/pages/classifyResults.jsp}
     */
    public Resolution process() {
        Map<Instances, Instances> ttInstances = retrieveListOfInstances();
        Map<ClassifierStatistics, List<EvaluatorTestStatistics>> results =
                new HashMap<ClassifierStatistics, List<EvaluatorTestStatistics>>();

        this.getContext().getRequest().setAttribute("results", results);
        return new ForwardResolution("/WEB-INF/pages/classifyResults.jsp");
    }

    public List<String> getClassifiers() {
        return classifiers;
    }

    public void setClassifiers(List<String> classifiers) {
        this.classifiers = classifiers;
    }

    public List<String> getLanguage() {
        return languages;
    }

    public void setLanguage(List<String> language) {
        this.languages = language;
    }

    // Parameterized return type (was raw List) — binary-compatible after
    // erasure, and callers assigning to a raw List still compile.
    public List<String> getTestDocTypes() {
        return testDocTypes;
    }

    public List<String> getTrainDocTypes() {
        return trainDocTypes;
    }

    public Integer getBaseline() {
        return baseline;
    }

    public void setBaseline(Integer baseline) {
        this.baseline = baseline;
    }

    public Integer getWordsToKeep() {
        return wordsToKeep;
    }

    public void setWordsToKeep(Integer wordsToKeep) {
        this.wordsToKeep = wordsToKeep;
    }

    public String getJoinTestClasses() {
        return joinTestClasses;
    }

    public void setJoinTestClasses(String joinTestClasses) {
        this.joinTestClasses = joinTestClasses;
    }

    public String getJoinTrainClasses() {
        return joinTrainClasses;
    }

    public void setJoinTrainClasses(String joinTrainClasses) {
        this.joinTrainClasses = joinTrainClasses;
    }

    public Integer getRepeat() {
        return repeat;
    }

    public void setRepeat(Integer repeat) {
        this.repeat = repeat;
    }

    public void setTestDocTypes(List<String> testDocTypes) {
        this.testDocTypes = testDocTypes;
    }

    public void setTrainDocTypes(List<String> trainDocTypes) {
        this.trainDocTypes = trainDocTypes;
    }
}
