/*
 * Copyright (C) 2012 JiangHongTiao
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package sk.lieskove.jianghongtiao.paris.core;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import sk.lieskove.jianghongtiao.commonpersist.Persist;
import sk.lieskove.jianghongtiao.paris.core.classify.LearnSetCreator;
import sk.lieskove.jianghongtiao.paris.core.classify.ThreadArffMaker;
import sk.lieskove.jianghongtiao.websearch.document.preprocessing.SupportedLanguages;
import sk.lieskove.jianghongtiao.websearch.enums.DocumentType;
import sk.lieskove.jianghongtiao.websearch.persistence.ClassificationStorage;
import weka.classifiers.Classifier;
import weka.classifiers.CostMatrix;
import weka.classifiers.Evaluation;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.bayes.NaiveBayesUpdateable;
import weka.classifiers.meta.CostSensitiveClassifier;
import weka.classifiers.trees.J48;
import weka.classifiers.trees.RandomForest;

/**
 *
 * @author xjuraj
 */
/**
 * Grid-searches cost matrices for a cost-sensitive J48 classifier over
 * 4 document classes, printing accuracy/precision/recall per matrix.
 * Off-diagonal costs are swept in steps of 0.1 (row-wise uniform), the
 * diagonal stays at 0.
 */
public class CostMatrixFinder {

    /** Languages used to build the ARFF data; only the first entry is used. */
    private static List<String> languages = new ArrayList<String>();
    /** Attribute cap passed to the ARFF makers. */
    private static Integer wordsToKeep = 1000;
    /** Suffix/flag for joined document-type naming (empty = no join). */
    private static String joinDocTypesString = "";
    /** Baseline threshold handed to the learn-set creator. */
    private static Double baseline = 50.0;
    /** The four target classes; their order fixes the cost-matrix indices. */
    private static DocumentType[] documentTypes = new DocumentType[]{
        DocumentType.INFORMATICS, DocumentType.OTHER, DocumentType.INFO_MANAGER, 
        DocumentType.MANAGER};

    /**
     * Builds train/test ARFF sets, sweeps cost matrices, evaluates a
     * cost-sensitive J48 for each, and prints a TSV results table.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        Persist.getSingleton();
        languages.add("Czech");
        SupportedLanguages language = SupportedLanguages.valueOf(languages.get(0));

        // Build two independent learn sets: one for training, one for testing.
        // NOTE(review): both calls use identical parameters — presumably the
        // creator samples randomly so the sets differ; confirm, otherwise
        // train == test and the evaluation is optimistic.
        LearnSetCreator learnSet = new LearnSetCreator();
        Map<String, List<ClassificationStorage>> trainSet =
                learnSet.createLearnSet(baseline, documentTypes, language, joinDocTypesString);
        Map<String, List<ClassificationStorage>> testSet =
                learnSet.createLearnSet(baseline, documentTypes, language, joinDocTypesString);

        // Convert both sets to Weka instances in parallel worker threads.
        ThreadArffMaker trainArff = new ThreadArffMaker(trainSet,
                "Arff train set create", languages, wordsToKeep);
        ThreadArffMaker testArff = new ThreadArffMaker(testSet,
                "Arff test set create", languages, wordsToKeep);
        trainArff.start();
        testArff.start();
        try {
            trainArff.join();
            testArff.join();
        } catch (InterruptedException ex) {
            // Restore the interrupt flag instead of swallowing it so callers
            // (or the JVM shutdown path) can still observe the interruption.
            Thread.currentThread().interrupt();
            System.out.println("Interrupted while building ARFF sets! " + ex);
            return;
        }

        // One evaluation per cost matrix. Evaluation does not override
        // equals(), so identity keys keep every grid cell distinct.
        Map<Evaluation, CostMatrix> evaluations = new HashMap<Evaluation, CostMatrix>();
        // NOTE(review): i starts at 8, apparently resuming an earlier partial
        // run of the full 0..9 sweep — confirm before relying on coverage.
        for (int i = 8; i < 10; i++) {
            for (int j = 0; j < 10; j++) {
                for (int k = 0; k < 10; k++) {
                    for (int l = 0; l < 10; l++) {
                        System.out.println("Build: " + i + "," + j + "," + k + "," + l);
                        CostMatrix cm = new CostMatrix(documentTypes.length);
                        CostSensitiveClassifier csc = new CostSensitiveClassifier();
                        Classifier c = new J48();
                        try {
                            Evaluation e = new Evaluation(testArff.getInstances());
                            // Shift to 1..10 so no off-diagonal cost is 0.
                            int I = i + 1;
                            int J = j + 1;
                            int K = k + 1;
                            int L = l + 1;

                            // Row-uniform misclassification costs in [0.1, 1.0];
                            // row = actual class, column = predicted class.
                            cm.setElement(0, 1, 0.1 * I);
                            cm.setElement(0, 2, 0.1 * I);
                            cm.setElement(0, 3, 0.1 * I);
                            cm.setElement(1, 0, 0.1 * J);
                            cm.setElement(1, 2, 0.1 * J);
                            cm.setElement(1, 3, 0.1 * J);
                            cm.setElement(2, 0, 0.1 * K);
                            cm.setElement(2, 1, 0.1 * K);
                            cm.setElement(2, 3, 0.1 * K);
                            cm.setElement(3, 0, 0.1 * L);
                            cm.setElement(3, 1, 0.1 * L);
                            cm.setElement(3, 2, 0.1 * L);

                            csc.setCostMatrix(cm);
                            csc.setClassifier(c);
                            try {
                                csc.buildClassifier(trainArff.getInstances());
                            } catch (Exception ex) {
                                System.out.println("Cannot build classifier! " + ex);
                                // Classifier was never built — evaluating it
                                // would be meaningless (or throw). Skip cell.
                                continue;
                            }
                            // BUG FIX: evaluate the cost-sensitive wrapper,
                            // not the bare J48 — otherwise the cost matrix
                            // has no effect and every cell scores identically.
                            e.evaluateModel(csc, testArff.getInstances());
                            evaluations.put(e, cm);
                        } catch (Exception ex) {
                            System.out.println("Cannot create evaluation! " + ex);
                        }

                    }
                }
            }
        }

        // Dump results as a tab-separated table, one row per cost matrix.
        System.out.println("Accuracy\tPrecision\tRecall\tCostMatrix");
        for (Map.Entry<Evaluation, CostMatrix> entry : evaluations.entrySet()) {
            Evaluation evaluation = entry.getKey();
            CostMatrix cm = entry.getValue();
            System.out.println(
                    evaluation.pctCorrect() + "\t"
                    + evaluation.weightedPrecision() + "\t"
                    + evaluation.weightedRecall() + "\t"
                    + cm.toMatlab());
        }
    }
}
