package cn.edu.fudan.rule;

import cn.edu.fudan.data.*;
import cn.edu.fudan.tools.ClassifierSummary;
import cn.edu.fudan.tools.GetConfig;
import cn.edu.fudan.type.*;
import org.apache.log4j.Logger;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Created by 80494 on 2017/4/12.
 */
/**
 * Rule-based classifier pipeline over wavelet-preprocessed explosion data.
 *
 * <p>Loads raw sensor files plus train/test time points, extracts features,
 * slices sliding windows for two classes (class A = label 0, class B = label 1),
 * generates and prunes classification rules, classifies the test windows and
 * logs the resulting accuracy and timings.
 */
public class WaveletRuleClassfierForExplosion {

    // Attribute log records to THIS class; the original mistakenly pointed at
    // WaveletRuleClassifier.class, making the two classifiers indistinguishable
    // in the logs.
    private static final Logger logger = Logger.getLogger(WaveletRuleClassfierForExplosion.class);

    /**
     * Runs the full train/evaluate pipeline.
     *
     * <p>NOTE: despite its constructor-like name this is an ordinary method (it
     * declares a {@code void} return type); the name is kept unchanged for
     * compatibility with existing callers.
     *
     * @param support    minimum support threshold passed to rule generation
     * @param confidence minimum confidence threshold passed to rule generation
     */
    public void WaveletRuleClassifierForExplosion(double support, double confidence) {
        System.setProperty("java.util.Arrays.useLegacyMergeSort", "true");
        try {
            Config config = new GetConfig().getConfig();
            logger.info(
                    "==================================================================================================================");
            logger.info(config);
            logger.info(
                    "===================================================================================================================");

            ReadData readData = new ReadData();
            ExtractFeature extractFeature = new ExtractFeature();
            SlideWindow slideWindow = new SlideWindow();
            WaveletPre waveletPre = new WaveletPre();
            RulePruning rulePruning = new RulePruning();
            RuleGeneration ruleGeneration = new RuleGeneration();
            Classifier classifier = new Classifier();

            String path = config.getPath();

            // IDs of the raw data files to load from <path>data\<id>.
            int[] datasets = new int[]{103, 148, 183, 260, 293, 424, 456, 511, 721, 850, 878, 952, 1168, 1468, 1570, 1788, 1814, 1874, 1881, 1896, 2064, 2067, 2108, 2240, 2433, 2469, 2484, 2664, 2760, 2814, 2988, 3011, 3067, 3130, 3207, 3275, 3305, 3306, 3534, 3539, 3541, 3551, 3577, 3606, 3698, 3718, 3741, 3844, 3961, 3982, 4001, 4022, 4303, 4304, 4318, 4564, 4611, 4677, 4810, 4907, 4955, 5042, 5166, 5481, 5911, 5943, 5946, 5947, 5949, 5988, 6008, 6064, 6102, 6118, 6119, 6121, 6157, 6283, 6302, 6328, 6344, 6385, 6405, 6435, 6514, 6582, 6594, 6604, 6709, 6768, 6775, 6779, 6791, 6799, 6801, 6862, 6920, 6946, 6962, 6971, 6976, 6998, 7053, 7128, 7241, 7265, 7288, 7310, 7316, 7369, 7387, 7414, 7417, 7474, 7476, 7520, 7530, 7533, 7537, 7541, 7543, 7553, 7567, 7570, 7574, 7664, 7665, 7715, 7740, 7765, 7770, 7794, 7799, 7888, 7889, 7899, 7900, 7903, 7909, 7910, 7911, 8076, 8256, 8273, 8325, 8330, 8540, 8572, 8669, 8832, 8983, 9145, 9252, 9307, 9825, 9831, 10184, 10269, 10831, 11063, 11130, 11136, 11313, 11345, 11512, 11518, 11830, 11868, 11960, 11962, 12192, 12515, 12555, 12708, 12721, 12764, 12807, 12825, 12987, 13014, 13113, 13665, 13684, 13845, 13847, 13866, 13939, 13962, 14108, 14197, 14285, 14330, 14364, 14663, 14790, 14798, 14805, 14930, 15201, 15313
            };

            List<DataItem> rawdatas = new ArrayList<>();
            List<List<Long>> trainpoints = new ArrayList<>();
            List<List<Long>> testpoints = new ArrayList<>();
            try {
                for (int dataset : datasets) {
                    rawdatas.addAll(readData.readDataFromFile(path + "data\\" + dataset, true));
                }
                trainpoints = readData.readTimePoint(path + "train.txt", false);
                testpoints = readData.readTimePoint(path + "test.txt", false);
            } catch (Exception e) {
                // Was silently swallowed; log it so an empty rawdatas (and the
                // resulting no-op run below) is explainable.
                logger.error("Failed to load raw data or train/test time points", e);
            }

            if (!rawdatas.isEmpty()) {
                Feature feature = extractFeature.getFeature(rawdatas, config.getThreshold_window(),
                        config.getProbability(), config.getInterval());

                // Two-class window extraction: index 0 of the time points is
                // class A, index 1 is class B.
                List<List<DataItem>> data_A = slideWindow.extractWindow(feature.getAbnormal(), trainpoints.get(0),
                        config.getWindow_length());
                List<List<DataItem>> data_B = slideWindow.extractWindow(feature.getAbnormal(), trainpoints.get(1),
                        config.getWindow_length());
                List<List<DataItem>> test_A = slideWindow.extractWindow(feature.getAbnormal(), testpoints.get(0),
                        config.getWindow_length());
                List<List<DataItem>> test_B = slideWindow.extractWindow(feature.getAbnormal(), testpoints.get(1),
                        config.getWindow_length());

                // BUGFIX: the original wrote `testrawdata = test_A` and then
                // addAll(test_B), which also grew test_A itself — so the label-0
                // loop below swept class-B windows too and testpre/testrawdata
                // indices diverged (same aliasing corrupted data_A via the unused
                // trainingrawdata). Copy instead of aliasing.
                List<List<DataItem>> testrawdata = new ArrayList<>(test_A);
                testrawdata.addAll(test_B);

                long start = System.currentTimeMillis();

                // Wavelet-preprocess the training windows; class A -> 0, B -> 1.
                List<WaveletPerOrder> trainpre = new ArrayList<>();
                for (List<DataItem> window : data_A) {
                    WaveletPerOrder pre = waveletPre.WaveletPre(window);
                    pre.setLabel(0);
                    trainpre.add(pre);
                }
                for (List<DataItem> window : data_B) {
                    WaveletPerOrder pre = waveletPre.WaveletPre(window);
                    pre.setLabel(1);
                    trainpre.add(pre);
                }

                // RuleGeneration(traindataset, minsupport, minconfidence, maxgap, kclass)
                long rulegenstart = System.currentTimeMillis();
                List<Rule> ruleset = ruleGeneration.RuleGeneration(trainpre, support, confidence, trainpre.size(), 2);
                long rulegenend = System.currentTimeMillis();

                TwoTuple<List<Rule>, Integer> twotuple = rulePruning.RulePruning(ruleset, trainpre, 2);

                // Wavelet-preprocess the test windows with their true labels.
                List<WaveletPerOrder> testpre = new ArrayList<>();
                for (List<DataItem> window : test_A) {
                    WaveletPerOrder pre = waveletPre.WaveletPre(window);
                    pre.setLabel(0);
                    testpre.add(pre);
                }
                for (List<DataItem> window : test_B) {
                    WaveletPerOrder pre = waveletPre.WaveletPre(window);
                    pre.setLabel(1);
                    testpre.add(pre);
                }

                // Classify every test window and count matches against the truth.
                int count = 0;
                for (int i = 0; i < testrawdata.size(); i++) {
                    if (classifier.Classifier(testrawdata.get(i), twotuple.first, twotuple.second)
                            == testpre.get(i).getLabel()) {
                        count++;
                    }
                }
                double accuracy = (double) count / (double) testrawdata.size();

                long end = System.currentTimeMillis();
                logger.info("The support is set to " + support + ", and confidence is set to " + confidence);
                logger.info("Got " + ruleset.size() + " rules");
                // BUGFIX: was (rulegenstart - rulegenend), which is always negative.
                logger.info("Run for " + (rulegenend - rulegenstart) / 1000.0 + " seconds for generate rule.");
                logger.info("Run for " + (end - start) / 1000.0 + " seconds.");
                logger.info("Accuracy is:" + accuracy);
            }
        } catch (IOException e1) {
            // Configuration could not be read; nothing sensible to do but report.
            logger.error("Failed to read configuration", e1);
        }
    }
}
