package fit;

import cloud.tianai.captcha.validator.common.model.dto.ImageCaptchaTrack;
import cloud.tianai.csv.CsvWriter;
import cloud.tianai.csv.CsvWriterBuilder;
import cloud.tianai.csv.Path;
import cloud.tianai.mate.captcha.validator.common.util.TrackUtils;
import cloud.tianai.neuron.common.Matrix;
import cloud.tianai.neuron.optimizer.OptimizerEnum;
import cloud.tianai.neuron.regression.LogisticRegression;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import lombok.Data;
import lombok.SneakyThrows;
import ml.dmlc.xgboost4j.java.Booster;
import ml.dmlc.xgboost4j.java.DMatrix;
import ml.dmlc.xgboost4j.java.XGBoost;
import ml.dmlc.xgboost4j.java.XGBoostError;

import java.io.*;
import java.util.*;

public class SliderCaptchaModelFit {


    /**
     * Entry point: loads recorded slider tracks, extracts features, then trains
     * and evaluates an XGBoost model on them.
     *
     * @throws IOException   if the track dump or libsvm file cannot be read/written
     * @throws XGBoostError  if XGBoost training-matrix construction fails
     */
    public static void main(String[] args) throws IOException, XGBoostError {

        // 读取滑动轨迹 — load recorded slider tracks grouped by label.
        Map<Double, ArrayList<Data1>> trackMap = readTrackMap();
        // Drop the sentinel label used for invalid/ignored samples.
        trackMap.remove(-99d);

        // 提取特征 — compute the numeric feature vector for every track.
        List<FeatureData> extractFeatures = extractFeatures(trackMap);

        // Train the XGBoost model and report resubstitution accuracy.
        predictFormXgb(extractFeatures);
    }

    private static void fitForXgb(List<FeatureData> extractFeatures) {
        // NOTE(review): unimplemented stub — XGBoost training is currently done by
        // fitXgb(svmPath, saveModelPath); this placeholder is never called.
    }

    /**
     * Trains an XGBoost model from the given features (written out as a libsvm
     * file first) and then evaluates that model on the same samples, printing
     * each mis-predicted sample and the overall resubstitution accuracy.
     *
     * @param extractFeatures labeled feature rows to train and evaluate on
     * @throws IOException  if the libsvm training file cannot be written
     * @throws XGBoostError if building the training matrix fails
     */
    private static void predictFormXgb(List<FeatureData> extractFeatures) throws IOException, XGBoostError {
        String svmPath = "C:\\Users\\Thinkpad\\Desktop\\lr-test.csv";
        String saveModelPath = "src/main/resources/xgb-model.json";
        writeLibSvm(extractFeatures, svmPath);
        fitXgb(svmPath, saveModelPath);

        if (extractFeatures.isEmpty()) {
            // Nothing to evaluate; also avoids a divide-by-zero in the accuracy print.
            return;
        }
        try {
            Booster model = XGBoost.loadModel(saveModelPath);
            int successNum = 0;
            for (FeatureData extractFeature : extractFeatures) {
                // Binarize the label exactly as writeLibSvm() does: any label >= 1
                // is positive, except the special class 2.
                int label = extractFeature.label >= 1 && extractFeature.label != 2 ? 1 : 0;
                List<Double> features = extractFeature.features;
                // Dummy value at dense column 0 so the remaining columns line up with
                // the 1-based feature indices written by writeLibSvm().
                float[] array = new float[features.size() + 1];
                array[0] = 1;
                for (int i = 0; i < features.size(); i++) {
                    array[i + 1] = features.get(i).floatValue();
                }
                DMatrix dMatrix = new DMatrix(array, 1, array.length, 0.0f);
                try {
                    float[][] predicts = model.predict(dMatrix);
                    float predict = predicts[0][0];
                    if (label == 1 && predict > 0.5) {
                        // 正确的 — correctly classified positive
                        successNum++;
                    } else if (label == 0 && predict < 0.5) {
                        // 正确的 — correctly classified negative
                        successNum++;
                    } else {
                        System.out.println("=================预测失败================= ip:" + extractFeature.ip + ",id:" + extractFeature.id);
                    }
                } finally {
                    // DMatrix wraps native memory; release it explicitly instead of
                    // leaking one native buffer per evaluated sample.
                    dMatrix.dispose();
                }
            }

            System.out.println("准确率:" + ((double) successNum / extractFeatures.size() * 100));
        } catch (XGBoostError e) {
            // Best-effort evaluation: log and return rather than abort the run.
            e.printStackTrace();
        }
    }


    /**
     * Trains a binary-classification XGBoost model from a libsvm-format file and
     * saves it to {@code saveModelPath}.
     *
     * <p>Native binaries: https://github.com/criteo-forks/xgboost-jars/releases
     * <p>Parameter tuning notes: https://blog.csdn.net/qq_36535820/article/details/120507280
     *
     * @param svmPath       path of the libsvm training file (see writeLibSvm)
     * @param saveModelPath where the trained model is written
     * @throws XGBoostError if loading the data, training, or saving the model fails
     */
    public static void fitXgb(String svmPath, String saveModelPath) throws XGBoostError {
        DMatrix dMatrix = new DMatrix(svmPath);
        try {
            Map<String, Object> params = new HashMap<String, Object>() {
                {
                    // Learning rate; typical candidates: [0.01, 0.015, 0.025, 0.05, 0.1].
                    put("eta", 0.1);
                    // Max tree depth (default 6); limits overfitting.
                    // Typical candidates: [3, 5, 6, 7, 9, 12, 15, 17, 25].
                    put("max_depth", 6);
                    // Minimum sum of instance weight needed in a leaf (default 1);
                    // larger values avoid fitting local noise, too large underfits.
                    put("min_child_weight", 1);
                    // 0 = verbose training output, 1 = silent.
                    put("silent", 0);
                    // Learning objective; "binary:logistic" outputs a probability.
                    put("objective", "binary:logistic");
                    // Minimum loss reduction required to split a leaf (default 0);
                    // larger values make the model more conservative.
                    put("gamma", "0.02");
                    // Row subsample ratio per tree (without replacement);
                    // < 1 reduces variance at the cost of some bias.
                    put("subsample", "0.8");
                    // Column subsample ratio per tree (default 1).
                    put("colsample_bytree", "0.8");
                    // Evaluation metric; "auc" suits this binary task
                    // (alternatives: rmse, mlogloss, error).
                    put("eval_metric", "auc");
                    // -1 = use all available threads.
                    put("nthread", "-1");
                }
            };

            // Watch list: training progress is reported against these matrices.
            // NOTE(review): "test" is the training matrix too, so the reported
            // "test" AUC is NOT a held-out estimate.
            Map<String, DMatrix> watches = new HashMap<String, DMatrix>() {
                {
                    put("train", dMatrix);
                    put("test", dMatrix);
                }
            };

            int nround = 100;
            Booster booster = XGBoost.train(dMatrix, params, nround, watches, null, null);
            booster.saveModel(saveModelPath);
        } finally {
            // Release the native training matrix instead of leaking it.
            dMatrix.dispose();
        }
    }

    /**
     * Dumps the feature rows to a CSV file ("lr-test.csv" on the desktop):
     * one named column per feature plus a trailing "type" column for the label.
     */
    public static void writeCsv(List<FeatureData> extractFeatures) {
        CsvWriter csvWriter = CsvWriterBuilder.builder()
                .local(false)
                .memoryStorageCapacity(2048)
                .threshold(2048)
                .tempFileDirectory("C:\\Users\\Thinkpad\\Desktop")
                .fileName("lr-test.csv")
                .buildAndInit();
        // Header row: feature names in extraction order, then the label column.
        List<String> header = Arrays.asList(
                "trackCount", "xMin", "yMin", "yMax", "totalTime",
                "xBeforeBackNum", "xAfterBackNum", "tRatio", "xStd", "avgX", "yStd", "avgY",
                "tStd", "xSameQuantityPercentage", "ySameQuantityPercentage", "tSameQuantityPercentage",
                "xAvgTimePercentage", "xDiff", "yDiff", "tDiff", "xPercentDiff", "xPercentDiff2", "yPercentDiff",
                "type");
        csvWriter.append(header);
        // One data row per sample: the feature values followed by the label.
        for (FeatureData row : extractFeatures) {
            List<Object> cells = new ArrayList<>(row.features);
            cells.add(row.label);
            csvWriter.append(cells);
        }
        csvWriter.finish();
    }

    /**
     * Writes the feature rows to {@code path} in libsvm format:
     * {@code <label> 1:<f0> 2:<f1> ...} — feature indices are 1-based.
     *
     * <p>The label is binarized: any label {@code >= 1} is positive (1), except
     * the special class 2, matching the rule used when evaluating the model.
     *
     * @param extractFeatures labeled feature rows to serialize
     * @param path            output file path
     * @throws IOException if the file cannot be written
     */
    public static void writeLibSvm(List<FeatureData> extractFeatures, String path) throws IOException {
        // try-with-resources: the writer is closed (and buffers flushed) even if a write fails.
        try (FileWriter fileWriter = new FileWriter(path)) {
            for (FeatureData extractFeature : extractFeatures) {
                List<Double> features = extractFeature.features;
                StringBuilder sb = new StringBuilder();
                sb.append(extractFeature.label >= 1 && extractFeature.label != 2 ? 1 : 0);
                sb.append(" ");
                for (int i = 0; i < features.size(); i++) {
                    // libsvm feature indices start at 1.
                    sb.append(i + 1).append(":").append(features.get(i)).append(" ");
                }
                fileWriter.write(sb.toString() + "\n");
            }
        }
    }

    /**
     * Fits a logistic-regression model on the feature rows, prints the first and
     * last loss values, the learned thetas, and the resubstitution accuracy, then
     * serializes the trained model to disk.
     *
     * @param extractFeatures labeled feature rows (label binarized: >= 1 -> 1, else 0)
     * @throws IOException if the model file cannot be written
     */
    public static void fit(List<FeatureData> extractFeatures) throws IOException {
        Matrix matrix = new Matrix(0, 0);
        Matrix label = new Matrix(0, 0);
        for (FeatureData extractFeature : extractFeatures) {
            matrix.add(extractFeature.features);
            ArrayList<Double> labels = new ArrayList<>();
            // Binarize: any positive class label (>= 1) becomes 1, everything else 0.
            labels.add(extractFeature.label >= 1 ? 1d : 0d);
            label.add(labels);
        }

        LogisticRegression LR = new LogisticRegression();
        List<Double> loss = LR.fit(matrix, label, 0.01, 10000, 0, 2, 1, OptimizerEnum.BGD);
        // Guard against an empty loss history to avoid IndexOutOfBoundsException.
        if (!loss.isEmpty()) {
            System.out.println("loss:" + loss.get(0) + "," + loss.get(loss.size() - 1));
        }
        System.out.println("thetas:" + LR.getThetas());
        // Evaluate on the training set itself (resubstitution accuracy).
        int successNum = 0;
        for (int x = 0; x < matrix.size(); x++) {
            Matrix sample = new Matrix(0, 0);
            sample.add(matrix.get(x));
            Double predict = LR.predict(sample).get(0, 0);
            Double labelData = label.get(x, 0);
            if (labelData == 0 && predict < 0.5) {
                successNum++;
            } else if (labelData >= 1 && predict > 0.5) {
                successNum++;
            }
        }
        System.out.println("准确率:" + ((double) successNum / matrix.size() * 100));
        // try-with-resources: the streams are closed even if serialization fails.
        try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(
                new FileOutputStream("C:\\Users\\Thinkpad\\Desktop\\LR.model"))) {
            objectOutputStream.writeObject(LR);
        }
    }

    /**
     * Flattens the label -> tracks map into one FeatureData per track,
     * computing each track's numeric feature vector via TrackUtils.
     */
    private static List<FeatureData> extractFeatures(Map<Double, ArrayList<Data1>> maps) {
        List<FeatureData> result = new ArrayList<>();
        for (Map.Entry<Double, ArrayList<Data1>> entry : maps.entrySet()) {
            Double label = entry.getKey();
            for (Data1 track : entry.getValue()) {
                List<Double> features = TrackUtils.features(track);
                FeatureData data = new FeatureData();
                data.label = label;
                data.features = features;
                data.ip = track.ip;
                data.id = track.id;
                result.add(data);
                // Debug trace: label, sample id, and the last feature value.
                System.out.println("[" + label + "] [" + track.id + "]:" + features.get(features.size() - 1));
            }
        }
        return result;
    }

    /**
     * Reads the recorded slider tracks from a JSON dump and groups them by label (y).
     *
     * <p>Samples from one known-bad IP are forced into the quarantine label -999.
     * The on-line class (label 3) is then down-sampled to 200 tracks, chosen
     * randomly with replacement, so it does not dominate the training set.
     *
     * @return map from label to the tracks recorded under that label
     */
    @SneakyThrows
    private static Map<Double, ArrayList<Data1>> readTrackMap() {
        List<JsonData> data;
        // try-with-resources: stream/reader are closed even if parsing fails.
        try (InputStreamReader reader = new InputStreamReader(
                new FileInputStream("C:\\Users\\Thinkpad\\Desktop\\82-track-5.json"))) {
            data = new Gson().fromJson(reader, new TypeToken<List<JsonData>>() {
            }.getType());
        }
        Map<Double, ArrayList<Data1>> result = new HashMap<>();
        for (JsonData datum : data) {
            // Quarantine all samples from this known-bad IP under label -999.
            if ("121.230.60.217".equals(datum.getIp())) {
                datum.setY(-999d);
            }
            Data1 d1 = new Data1();
            d1.addAll(datum.getX());
            d1.ip = datum.ip;
            d1.id = datum.id;
            result.computeIfAbsent(datum.getY(), k -> new ArrayList<>()).add(d1);
        }
        // Down-sample the on-line tracks (label 3) to 200, with replacement.
        ArrayList<Data1> online = result.get(3d);
        if (online != null && !online.isEmpty()) {
            Random random = new Random(); // created once, not per draw
            ArrayList<Data1> sampled = new ArrayList<>(200);
            for (int i = 0; i < 200; i++) {
                sampled.add(online.get(random.nextInt(online.size())));
            }
            result.put(3d, sampled);
        }
        return result;
    }

    /**
     * One record of the raw JSON track dump: the slide-gesture points (x),
     * the label (y), and the originating client ip / sample id.
     * Lombok {@code @Data} generates the getters/setters used in readTrackMap.
     */
    @Data
    public static class JsonData {
        // Recorded track points of one slide gesture.
        private List<ImageCaptchaTrack.Track> x;
        // Class label; readTrackMap overwrites it to -999d for one blacklisted ip.
        private Double y;
        private String ip;
        private String id;
    }


    /**
     * A single training sample: the extracted feature vector with its label,
     * plus the originating ip / sample id kept for diagnostic output.
     */
    public static class FeatureData {
        // Class label; downstream code treats label >= 1 (except 2) as positive.
        double label;
        List<Double> features;

        String ip;
        String id;

    }

    /**
     * A slide track (list of points) tagged with its originating ip and sample id.
     * NOTE(review): extending ArrayList is convenient here but composition would be
     * the more conventional design; callers pass it directly to TrackUtils.features.
     */
    public static class Data1 extends ArrayList<ImageCaptchaTrack.Track> {
        String ip;

        String id;

    }
}

