/*
 Copyright (c) 2014-2021 by Contributors

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
package com.whirl.ai_ml_example;

import com.whirl.ai_ml_example.util.DataLoader;
import ml.dmlc.xgboost4j.java.Booster;
import ml.dmlc.xgboost4j.java.DMatrix;
import ml.dmlc.xgboost4j.java.XGBoost;
import ml.dmlc.xgboost4j.java.XGBoostError;
//import ml.dmlc.xgboost4j.java.example.util.DataLoader;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.HashMap;

import static com.whirl.util.Constants.XGBoost_PATH;

/**
 * a simple example of java wrapper for xgboost
 *
 * @author hzx
 */
public class BasicWalkThrough {

  /**
   * Compares two prediction matrices for exact equality.
   *
   * @param fPredicts first prediction matrix (rows = samples)
   * @param sPredicts second prediction matrix
   * @return {@code true} iff both matrices have the same number of rows and
   *     every row is element-wise identical
   */
  public static boolean checkPredicts(float[][] fPredicts, float[][] sPredicts) {
    if (fPredicts.length != sPredicts.length) {
      return false;
    }
    System.out.println("一检通过");

    for (int i = 0; i < fPredicts.length; i++) {
      if (!Arrays.equals(fPredicts[i], sPredicts[i])) {
        System.out.println("二检失败");
        return false;
      }
    }
    System.out.println("二检通过");

    return true;
  }

  /**
   * Writes a dumped model (one text section per booster) to a UTF-8 file.
   *
   * @param modelPath  destination file path
   * @param modelInfos per-booster text dumps, as returned by
   *                   {@code Booster.getModelDump}
   * @throws IOException if the file cannot be created or written
   */
  public static void saveDumpModel(String modelPath, String[] modelInfos) throws IOException {
    // try-with-resources guarantees the writer is closed (and its buffer
    // flushed) even on failure; errors propagate to the caller as declared
    // instead of being swallowed by a catch-all.
    try (PrintWriter writer = new PrintWriter(modelPath, "UTF-8")) {
      for (int i = 0; i < modelInfos.length; ++i) {
        writer.print("booster[" + i + "]:\n");
        writer.print(modelInfos[i]);
      }
    }
  }

  public static void main(String[] args) throws IOException, XGBoostError {
    // Load data from LibSVM text files; DMatrix can also read binary buffers
    // previously saved by xgboost4j (see below).
    DMatrix trainMat = new DMatrix(XGBoost_PATH+"demo/data/agaricus.txt.train");
    DMatrix testMat = new DMatrix(XGBoost_PATH+"demo/data/agaricus.txt.test");

    HashMap<String, Object> params = new HashMap<String, Object>();
    params.put("eta", 1.0); // eta [default 0.3]: step-size shrinkage, akin to a learning rate; smaller values make the model more robust.
    params.put("max_depth", 2);  // max_depth [default 6]: maximum tree depth; larger values fit more local patterns and risk overfitting — tune with CV.
    params.put("silent", 0);
    params.put("objective", "binary:logistic"); // binary:logistic — logistic regression for binary classification; predictions are probabilities, not class labels (cf. binary:logitraw).

    params.put("eval_metric", "error"); // "error" — classification error, the default metric for this objective.
    HashMap<String, DMatrix> watches = new HashMap<String, DMatrix>();
    watches.put("train", trainMat);
    watches.put("test", testMat);

    // Number of boosting rounds.
    int round = 2;

    // Train a boosted model on the training set.
    Booster booster = XGBoost.train(trainMat, params, round, watches, null, null);

    // Predict on the test set.
    float[][] predicts = booster.predict(testMat);

    // Make sure the output directory for model artifacts exists.
    File file = new File("./model");
    if (!file.exists()) {
      file.mkdirs();
    }

    // Save the trained model.
    String modelPath = "./model/xgb.model";
    booster.saveModel(modelPath);

    // Dump the model as text, resolving feature indices via the feature map.
    String[] modelInfos = booster.getModelDump(XGBoost_PATH+"demo/data/featmap.txt", false);
    saveDumpModel("./model/dump.raw.txt", modelInfos);

    // Save the test DMatrix into a binary buffer for fast reloading.
    testMat.saveBinary("./model/dtest.buffer");

    // Reload both model and data, then predict again with the reloaded pair.
    Booster booster2 = XGBoost.loadModel("./model/xgb.model");
    DMatrix testMat2 = new DMatrix("./model/dtest.buffer");
    float[][] predicts2 = booster2.predict(testMat2);


    // Verify the reloaded model reproduces the original predictions exactly.
    System.out.println(checkPredicts(predicts, predicts2));

    // Build a DMatrix from in-memory CSR sparse data loaded from a LibSVM file.
    System.out.println("start build dmatrix from csr sparse data ...");
    DataLoader.CSRSparseData spData = DataLoader.loadSVMFile(XGBoost_PATH+"demo/data/agaricus.txt.train");

    DMatrix trainMat2 = new DMatrix(spData.rowHeaders, spData.colIndex, spData.data,
                                    DMatrix.SparseType.CSR, 127);
    // rowHeaders: CSR row offsets (one entry per sample, plus the end offset)
    // colIndex:   feature indices of the non-zero entries
    // data:       feature values of the non-zero entries
    trainMat2.setLabel(spData.labels);

    // Retrain from the CSR-built matrix with the same watch list setup.
    HashMap<String, DMatrix> watches2 = new HashMap<String, DMatrix>();
    watches2.put("train", trainMat2);
    watches2.put("test", testMat2);
    Booster booster3 = XGBoost.train(trainMat2, params, round, watches2, null, null);
    float[][] predicts3 = booster3.predict(testMat2);

    // The CSR-trained model should yield the same predictions as the original.
    System.out.println(checkPredicts(predicts, predicts3));
  }
}
