/*
 * Copyright (C) 2012 JiangHongTiao
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package sk.lieskove.jianghongtiao.paris.core.classify.old;

import sk.lieskove.jianghongtiao.common.utils.PropertiesUtils;
import org.apache.log4j.Logger;
import sk.lieskove.jianghongtiao.common.utils.FileUtils;
import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import sk.lieskove.jianghongtiao.websearch.persistence.ClassificationStorage;
import weka.core.DenseInstance;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.RemoveByName;
import weka.filters.unsupervised.attribute.StringToWordVector;

/**
 * 
 * @author xjuraj e-mail: jjurco.sk_gmail.com
 */
/**
 * Worker thread that converts classified documents fetched from the DB into a
 * Weka {@link Instances} dataset and post-processes it with
 * {@link StringToWordVector} and {@link RemoveByName} filters.
 *
 * <p>Run the thread, {@code join()} it, then read the result via
 * {@link #getModifiedInstances()}.</p>
 */
public class ArffThread extends Thread implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * Location of the stop-word list consumed by {@link StringToWordVector}.
     * NOTE(review): machine-specific absolute path — should come from
     * configuration; kept as-is to preserve behavior.
     */
    private static final String STOP_WORDS_PATH =
            "/home/xjuraj/Projects/master-thesis/Paris/resources"
            + "/stopwords/inf-man-stop-words.cs";

    private transient Logger log = Logger.getLogger(ArffThread.class);
    // NOTE(review): never read anywhere in this class — candidate for removal,
    // kept to avoid touching the serialized/declared form.
    private transient PropertiesUtils pu = new PropertiesUtils(ArffThread.class);

    /** Classification items retrieved from the DB. */
    private List<ClassificationStorage> set;
    /** Document types (classes) to keep in the dataset. */
    private List<String> docTypesP;
    /** Specification of which document types get merged into one class. */
    private String joinString;
    /** Allowed page languages (nominal attribute values). */
    private List<String> language;
    /** Vocabulary size passed to StringToWordVector. */
    private int wordsToKeep;

    /** Result of {@link #run()}; null until the thread has finished. */
    private Instances modifiedInstances;

    /**
     * @param set retrieved classification items from the DB
     * @param docTypesP document types to leave
     * @param joinString join string of document types
     * @param language allowed page languages
     * @param wordsToKeep vocabulary size for the word-vector filter
     * @param name thread name
     */
    public ArffThread(List<ClassificationStorage> set, List<String> docTypesP,
            String joinString, List<String> language, int wordsToKeep, String name) {
        super(name);
        this.set = set;
        this.docTypesP = docTypesP;
        this.joinString = joinString;
        this.language = language;
        this.wordsToKeep = wordsToKeep;
    }

    /**
     * Builds the raw dataset and applies the filters; the outcome is exposed
     * through {@link #getModifiedInstances()}.
     */
    @Override
    public void run() {
        Instances data = createInstances(language, docTypesP, set, joinString);
        modifiedInstances = modifyInstances(data, wordsToKeep);
    }

    /**
     * @return the filtered dataset, or null if the thread has not finished
     *         (or failed); call only after {@code join()}.
     */
    public Instances getModifiedInstances() {
        return modifiedInstances;
    }

    /**
     * Resolves the final type (class) of a document. If the original class was
     * merged with others via the join specification, the merged class is
     * returned; otherwise the original class (lower-cased).
     *
     * @param joinItems mapping original-class → merged-class
     * @param pageClass original type (class) of the document
     * @return effective class name, lower-cased
     */
    private String getDocClass(Map<String, String> joinItems, String pageClass) {
        String key = pageClass.toLowerCase();
        if (joinItems.containsKey(key)) {
            return joinItems.get(key);
        }
        return key;
    }

    /**
     * Combines original and merged document types (classes) into the final set
     * of nominal class values: merged targets plus every original type that
     * was not itself merged away.
     *
     * @param docType original types
     * @param joinItems joined types (original → merged)
     * @return final list of classes, without duplicates
     */
    private List<String> getDocTypes(List<String> docType, Map<String, String> joinItems) {
        Set<String> result = new HashSet<String>(joinItems.values());
        Set<String> keys = new HashSet<String>();
        for (String string : docType) {
            keys.add(string.toLowerCase());
        }
        // Types that were merged must not also appear under their old name.
        keys.removeAll(joinItems.keySet());
        result.addAll(keys);
        return new ArrayList<String>(result);
    }

    /**
     * Builds a Weka dataset with three attributes — nominal class, nominal
     * language, and string page content — one instance per stored item.
     *
     * @param languages nominal values for the language attribute
     * @param docType document types to keep
     * @param items classification items; their page content is read from disk
     * @param joinString join specification of document types
     * @return the populated dataset, or null when {@code items} is empty or a
     *         page file cannot be read
     */
    public Instances createInstances(List<String> languages, List<String> docType,
            List<ClassificationStorage> items, String joinString) {

        if ((items == null) || (items.isEmpty())) {
            return null;
        }

        Map<String, String> joinItems = ClassifyManager.joinItems(joinString);
        List<String> docTypes = getDocTypes(docType, joinItems);

        try {
            Attribute pageClass = new Attribute("page_class", docTypes);
            Attribute pageLang = new Attribute("page_language", languages);
            // null value list => string attribute
            Attribute pageContent = new Attribute("page_content", (List<String>) null);

            ArrayList<Attribute> atts = new ArrayList<Attribute>(3);
            atts.add(pageClass);
            atts.add(pageLang);
            atts.add(pageContent);

            // Creating Instances assigns indices to the attributes above,
            // which setValue(Attribute, ...) relies on below.
            Instances data = new Instances("Paris", atts, 0);
            for (ClassificationStorage item : items) {

                String textData = FileUtils.read(new File(
                        item.getWebSearchResponse().getFileLocation()));
                if (textData == null) {
                    textData = "null";
                }

                String lang = (item.getWebSearchResponse().getLanguageName() != null)
                        ? item.getWebSearchResponse().getLanguageName().name() : "null";
                String type = (item.getDocumentType() != null)
                        ? item.getDocumentType().name().toLowerCase() : "null";

                // Fresh instance per item: previously a single instance was
                // reused, which only worked because Instances.add copies it.
                Instance inst = new DenseInstance(3);
                int classIdx = docTypes.indexOf(getDocClass(joinItems, type));
                int langIdx = languages.indexOf(lang);
                if (classIdx < 0) {
                    log.warn("Unknown document class '" + type
                            + "'; instance gets an invalid class index.");
                }
                if (langIdx < 0) {
                    log.warn("Unknown language '" + lang
                            + "'; instance gets an invalid language index.");
                }
                inst.setValue(pageClass, classIdx);
                inst.setValue(pageLang, langIdx);
                inst.setValue(pageContent, textData);

                data.add(inst);
            }
            data.setClass(pageClass);
            return data;
        } catch (IOException ex) {
            // Was: "Cannot save ARFF file." — nothing is saved here; the
            // failure is reading page content, and the cause must be kept.
            log.error("Cannot read page content while building instances.", ex);
        }
        return null;
    }

    /**
     * Turns the string content attribute into a bag-of-words representation
     * and drops attributes matching digits-only or single-character names.
     *
     * @param data raw dataset produced by {@link #createInstances}
     * @param wordsToKeep vocabulary size for StringToWordVector
     * @return filtered dataset, or null when filtering fails
     */
    public Instances modifyInstances(Instances data, int wordsToKeep) {
        StringToWordVector stringToWordVector = new StringToWordVector();
        File sw = new File(STOP_WORDS_PATH);
        RemoveByName removeByName = new RemoveByName();
        if (sw.exists()) {
            stringToWordVector.setStopwords(sw);
        } else {
            log.warn("Stop words not found! Filename: " + sw.getAbsolutePath());
        }
        stringToWordVector.setWordsToKeep(wordsToKeep);
        Instances result = null;
        try {
            stringToWordVector.setInputFormat(data);
            result = Filter.useFilter(data, stringToWordVector);

            // Remove purely numeric and single-character token attributes.
            removeByName.setExpression("(\\d+)|(.)");
            removeByName.setInvertSelection(false);
            removeByName.setInputFormat(result);
            result = Filter.useFilter(result, removeByName);
        } catch (Exception ex) {
            // Keep the cause — it was previously swallowed.
            log.error("Exception thrown while modifying instances with StringToWordVector",
                    ex);
        }

        return result;
    }

}
