/**
 * LibLINEAR evaluation driver for topic-model features.
 *
 * @date 2015-12-23 14:56:12
 * @version V1.0
 */
package edu.cn.nlsde.tmfst.EVA;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.Random;

import edu.cn.nlsde.tmfst.my_common.GenerateCsv;

import weka.classifiers.evaluation.Evaluation;
import weka.classifiers.evaluation.Prediction;
import weka.classifiers.functions.LibLINEAR;
import weka.core.Instances;
import weka.core.converters.CSVLoader;

/**
 * Evaluates LDA-style topic features with a LibLINEAR classifier using Weka
 * cross-validation, printing the class-detail and confusion-matrix reports and
 * writing per-instance predictions to a side file.
 *
 * @author ffftzh
 * @date 2015-12-23 14:56:12
 */
public class liblinears {

    /**
     * Entry point: iterates over the configured data set names. The actual
     * evaluation calls are currently disabled; the loop is kept as a stub so
     * the intended usage (shown in the comments) is easy to re-enable.
     *
     * @param args unused
     * @throws Exception propagated from any re-enabled evaluation call
     */
    public static void main(String[] args) throws Exception {
        String[] dataSetList = {
                "news",
//              "dblp-6",
//              "new-tweet",
//              "baiduQA"
        };
        for (String dataSet : dataSetList) {
            // NOTE(review): evaluation was disabled upstream; intended usage was:
            // evaluate("data/" + dataSet + "/" + dataSet + ".data.theta",
            //          "data/" + dataSet + "/" + dataSet + ".tag", 5);
        }
    }

    /**
     * Convenience overload of {@link #evaluate(String, String, int, boolean)}
     * with progress output enabled.
     *
     * @param path  path to the theta (feature) file; {@code path + ".csv"} is generated and loaded
     * @param tag   path to the label file paired with {@code path}
     * @param times number of cross-validation folds
     * @return weighted F-measure of the cross-validation, or -1 on failure
     * @throws IOException declared for interface compatibility (errors are
     *         currently caught and reported internally)
     */
    public static double evaluate(String path, String tag, int times) throws IOException {
        return evaluate(path, tag, times, false);
    }

    /**
     * Runs {@code times}-fold cross-validation of a LibLINEAR classifier over
     * the CSV generated from the given theta/tag file pair.
     *
     * <p>Side effects: writes {@code path + ".csv"} (via {@code GenerateCsv}),
     * writes per-instance predictions ("actual predicted weight" lines) to
     * {@code path + ".csv.arff"}, and prints evaluation reports to stdout.
     *
     * @param path    path to the theta (feature) file
     * @param tag     path to the label file
     * @param times   number of cross-validation folds
     * @param silence when true, suppresses the timestamp and class-detail output
     *                (the confusion matrix is always printed)
     * @return weighted F-measure across folds, or -1 if loading or evaluation failed
     * @throws IOException declared for interface compatibility; I/O errors are
     *         caught, logged, and reported via the -1 return value
     */
    public static double evaluate(String path, String tag, int times, boolean silence) throws IOException {
        // Convert the theta/tag pair into a CSV that Weka's CSVLoader can read.
        GenerateCsv ooline = new GenerateCsv();
        ooline.generateCsvForLDA(path, tag);

        // try-with-resources: the prediction writer is closed on every exit
        // path (the original leaked it when loading or evaluation threw).
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(path + ".csv.arff"))) {
            CSVLoader loader = new CSVLoader();
            loader.setFile(new File(path + ".csv"));
            Instances dataset = loader.getDataSet();
            dataset.setClass(dataset.attribute("Label"));

            LibLINEAR svm = new LibLINEAR();
            try {
                Evaluation eval = new Evaluation(dataset);
                if (!silence) {
                    System.out.println(new Date());
                }
                // Fixed seed keeps the fold split reproducible across runs.
                eval.crossValidateModel(svm, dataset, times, new Random(1));
                if (!silence) {
                    System.out.println(
                            "----------------eval start-------------------");
                    System.out.println(eval.toClassDetailsString());
                }
                System.out.println(eval.toMatrixString());

                // Dump one "actual predicted weight" line per test instance.
                List<Prediction> resultlist = eval.predictions();
                for (Prediction one : resultlist) {
                    writer.write(one.actual() + " " + one.predicted() + " " + one.weight() + "\n");
                }
                writer.flush();
                return eval.weightedFMeasure();
            } catch (Exception e) {
                // Weka evaluation failed; fall through to the -1 sentinel.
                e.printStackTrace();
            }
        } catch (IOException e) {
            // CSV loading or prediction-file writing failed.
            e.printStackTrace();
        }
        return -1;
    }
}
