/*
 * Copyright (C) 2012 JiangHongTiao
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package sk.lieskove.jianghongtiao.paris.core.classify;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import org.apache.log4j.Logger;
import sk.lieskove.jianghongtiao.common.utils.Array;
import sk.lieskove.jianghongtiao.common.utils.PropertiesUtils;
import sk.lieskove.jianghongtiao.commonpersist.Persist;
import sk.lieskove.jianghongtiao.paris.core.persistence.TestSet;
import sk.lieskove.jianghongtiao.paris.core.persistence.TestSetClassificationStorage;
import sk.lieskove.jianghongtiao.paris.core.persistence.TrainSet;
import sk.lieskove.jianghongtiao.paris.core.persistence.TrainSetClassificationStorage;
import sk.lieskove.jianghongtiao.websearch.document.preprocessing.SupportedLanguages;
import sk.lieskove.jianghongtiao.websearch.enums.DocumentType;
import sk.lieskove.jianghongtiao.websearch.persistence.ClassificationStorage;

/**
 * 
 * @author xjuraj e-mail: jjurco.sk_gmail.com
 */
public class LearnSetCreator implements Serializable {

    // NOTE(review): class is Serializable but 'em' (EntityManager) is neither
    // transient nor serializable — serialization of this class will fail.
    // Confirm whether Serializable is actually required; if so, 'em' and
    // 'allCI' need to be re-acquired after deserialization.

    /** Class logger (not used in the currently visible code). */
    private transient Logger log = Logger.getLogger(LearnSetCreator.class);
    /** Properties helper (not used in the currently visible code). */
    private transient PropertiesUtils pu = new PropertiesUtils(LearnSetCreator.class);
    /** Shared entity manager; query execution synchronizes on it. */
    private final EntityManager em = Persist.getSingleton().getEntityManager();
    /**
     * Reusable query selecting all stored classification items of a given
     * document type and language. Parameters are (re)bound on every use
     * inside the synchronized block in {@link #getDocumentsForClasses}.
     */
    private TypedQuery<ClassificationStorage> allCI = em.createQuery(
              "SELECT cs FROM ClassificationStorage cs"
            + "  WHERE cs.documentType = :docType"
            + "  AND cs.webSearchResponse.languageName = :lang",
            ClassificationStorage.class);

    /**
     * Retrieve all documents from the DB for the specified classes and language.
     *
     * @param <T> element type the caller expects; the query actually yields
     *            {@link ClassificationStorage} instances, so callers must bind
     *            {@code T} to that type (as {@link #createLearnSet} does)
     * @param classes document classes to retrieve
     * @param language language of the documents
     * @return map from class name to the (mutable) list of matching documents
     */
    public <T> Map<String, List<T>> getDocumentsForClasses(DocumentType[] classes,
            SupportedLanguages language) {
        Map<String, List<T>> classDocuments = new HashMap<String, List<T>>();
        // the shared query and entity manager are not thread-safe: guard them
        synchronized (em) {
            for (DocumentType documentClass : classes) {
                allCI.setParameter("docType", documentClass);
                allCI.setParameter("lang", language);
                // copy the result so later mutation does not touch the query result
                List<ClassificationStorage> resultList =
                        new ArrayList<ClassificationStorage>(allCI.getResultList());
                // safe as long as callers bind T to ClassificationStorage (see Javadoc)
                @SuppressWarnings("unchecked")
                List<T> typedList = (List<T>) resultList;
                classDocuments.put(documentClass.name(), typedList);
            }
        }
        return classDocuments;
    }

    /**
     * Transform documents to the train-set entity format accepted by the DB.
     * Elements that are not {@link ClassificationStorage} instances are skipped.
     *
     * @param items map from original class name to its documents
     * @return train-set storage entities, one per recognized document
     */
    private <T> List<TrainSetClassificationStorage> transformToTrainSetClassificationStorage(
            Map<String, List<T>> items) {
        List<TrainSetClassificationStorage> result =
                new ArrayList<TrainSetClassificationStorage>();
        for (Map.Entry<String, List<T>> entry : items.entrySet()) {
            for (T trainExample : entry.getValue()) {
                if (trainExample instanceof ClassificationStorage) {
                    TrainSetClassificationStorage tscs = new TrainSetClassificationStorage();
                    tscs.setClassificationStorage((ClassificationStorage) trainExample);
                    tscs.setOriginalClass(entry.getKey());
                    result.add(tscs);
                }
            }
        }
        return result;
    }

    /**
     * Transform documents to the test-set entity format accepted by the DB.
     * Elements that are not {@link ClassificationStorage} instances are skipped.
     *
     * @param items map from original class name to its documents
     * @return test-set storage entities, one per recognized document
     */
    private <T> List<TestSetClassificationStorage> transformToTestSetClassificationStorage(
            Map<String, List<T>> items) {
        List<TestSetClassificationStorage> result =
                new ArrayList<TestSetClassificationStorage>();
        for (Map.Entry<String, List<T>> entry : items.entrySet()) {
            for (T testExample : entry.getValue()) {
                if (testExample instanceof ClassificationStorage) {
                    TestSetClassificationStorage tscs = new TestSetClassificationStorage();
                    tscs.setClassificationStorage((ClassificationStorage) testExample);
                    tscs.setOriginalClass(entry.getKey());
                    result.add(tscs);
                }
            }
        }
        return result;
    }

    /**
     * Create a universal learning set with the specified baseline, classes and language.
     *
     * @param baseline baseline of the training set (percentage, 0-100)
     * @param classes classes to retrieve
     * @param language language of the documents
     * @param joinClassesString document classes to join, in the format:
     *        CLASS_1,...,CLASS_N[=&gt;RESULT_CLASS_1];CLASS_12,...,CLASS_N2[=&gt;RESULT_CLASS_2]...
     * @return learn set ready to store to the DB and create an ARFF file from
     */
    public Map<String, List<ClassificationStorage>> createLearnSet(Double baseline,
            DocumentType[] classes, SupportedLanguages language, String joinClassesString) {
        JoinDocumentManager jd = new JoinDocumentManager(joinClassesString);
        Map<String, List<ClassificationStorage>> classDocuments =
                getDocumentsForClasses(classes, language);
        Map<String, List<ClassificationStorage>> joinDocuments =
                jd.joinDocuments(classDocuments);
        return normalizeToBaseline(baseline, joinDocuments);
    }

    /**
     * Create a training set entity from an already-built learn set.
     *
     * @param learnSet map from class name to its documents (e.g. from
     *        {@link #createLearnSet})
     * @param language language of the documents
     * @return train set ready to store to the DB and create an ARFF file from
     */
    public TrainSet createTrainSet(Map<String, List<ClassificationStorage>> learnSet,
            SupportedLanguages language) {
        TrainSet result = new TrainSet();
        result.setBaseline(countBaseline(learnSet));
        result.setLang(language);
        result.setClasses(learnSet.keySet().toArray(new String[0]));
        result.setTrainSetClassificationStorage(
                transformToTrainSetClassificationStorage(learnSet));
        return result;
    }

    /**
     * Create a testing set entity from an already-built learn set.
     *
     * @param learnSet map from class name to its documents (e.g. from
     *        {@link #createLearnSet})
     * @param language language of the documents
     * @return test set ready to store to the DB and create an ARFF file from
     */
    public TestSet createTestSet(Map<String, List<ClassificationStorage>> learnSet,
            SupportedLanguages language) {
        TestSet result = new TestSet();
        result.setBaseline(countBaseline(learnSet));
        result.setLang(language);
        result.setClasses(learnSet.keySet().toArray(new String[0]));
        result.setTestSetClassificationStorage(
                transformToTestSetClassificationStorage(learnSet));
        return result;
    }

    /**
     * Count the baseline of the documents: the share (in percent) of the
     * largest class in the total number of documents.
     *
     * @param items items to count the baseline for
     * @return baseline in percent; 0.0 for an empty document set
     */
    public <T> double countBaseline(Map<String, List<T>> items) {
        int count = Array.countListsSize(items.values());
        if (count == 0) {
            return 0.0; // avoid 100.0/0 -> NaN for an empty document set
        }
        int max = Array.getMaxListSize(items.values());
        return (100.0 / count) * max;
    }

    /**
     * Normalize the number of documents per class so the resulting set has
     * (at most) the given baseline. Classes larger than the computed cap are
     * reduced to a random subset; smaller classes are kept as-is.
     *
     * @param baseline target baseline in percent (0-100)
     * @param normalize map of documents to normalize
     * @return normalized map
     */
    public <T> Map<String, List<T>> normalizeToBaseline(Double baseline,
            Map<String, List<T>> normalize) {
        Map<String, List<T>> result = new HashMap<String, List<T>>();
        int count = Array.countListsSize(normalize.values());
        int max = Array.getMaxListSize(normalize.values());
        double a = baseline / 100.0;
        // baseline >= 100% means no class can exceed it: keep everything.
        // (The original formula divided by zero here, yielding maxItems == 0
        // and silently dropping all documents.)
        if (a >= 1.0) {
            result.putAll(normalize);
            return result;
        }
        double b = count - max;
        // solve 100*maxItems/(count-max+maxItems) == baseline for maxItems
        int maxItems = (int) Math.round((-1.0 * a * b) / (a - 1.0));
        for (Map.Entry<String, List<T>> entry : normalize.entrySet()) {
            List<T> documents = entry.getValue();
            if (documents.size() > maxItems) {
                // cast keeps this compatible whether randomSubset is generic or raw
                @SuppressWarnings("unchecked")
                List<T> subset = (List<T>) Array.randomSubset(documents, maxItems);
                result.put(entry.getKey(), subset);
            } else {
                result.put(entry.getKey(), documents);
            }
        }
        return result;
    }

}
