package com.zhny.test;

import org.apache.parquet.Strings;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;

// $example on$
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.mllib.linalg.Vector;
import org.apache.spark.mllib.linalg.Vectors;
import scala.Tuple2;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.mllib.classification.LogisticRegressionModel;
import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS;
import org.apache.spark.mllib.evaluation.MulticlassMetrics;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.mllib.util.MLUtils;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

// Logistic regression utility (multiclass, trained with L-BFGS).
public class LogisticRegressionUtil {

    /**
     * Trains a 10-class logistic regression model (L-BFGS) on a LibSVM-format
     * file and writes the model description plus the test-set accuracy to
     * {@code resultFilePath}.
     *
     * @param dataPathFile   path to LibSVM-format input data; when null or
     *                       empty, a bundled sample file is used instead
     * @param resultFilePath file the model summary and accuracy are written to
     */
    public static void exc(String dataPathFile, String resultFilePath) {
        SparkConf conf = new SparkConf()
                .setAppName("JavaLogisticRegressionWithLBFGSExample")
                .setMaster("local[1]");
        conf.set("spark.driver.allowMultipleContexts", "true");
        SparkContext sc = new SparkContext(conf);

        try {
            // Fall back to the bundled sample data when no path is supplied.
            // (Plain Java check — no need for the shaded parquet Strings helper.)
            String path = (dataPathFile == null || dataPathFile.isEmpty())
                    ? "src/main/resources/data/LogisticRegressionData.txt"
                    : dataPathFile;

            JavaRDD<LabeledPoint> data = MLUtils.loadLibSVMFile(sc, path).toJavaRDD();

            // Split initial RDD into two... [60% training data, 40% testing data].
            // Fixed seed (11L) keeps the split reproducible across runs.
            JavaRDD<LabeledPoint>[] splits = data.randomSplit(new double[] {0.6, 0.4}, 11L);
            JavaRDD<LabeledPoint> training = splits[0].cache();
            JavaRDD<LabeledPoint> test = splits[1];

            // Run training algorithm to build the model.
            LogisticRegressionModel model = new LogisticRegressionWithLBFGS()
                    .setNumClasses(10)
                    .run(training.rdd());

            // try-with-resources guarantees the writer is closed even when an
            // exception is thrown mid-way (the original leaked it on failure).
            // NOTE(review): FileWriter uses the platform default charset — confirm
            // that is acceptable for the result file.
            try (FileWriter fos = new FileWriter(resultFilePath)) {
                fos.write(model.toString() + "\n");

                // Compute (prediction, label) pairs on the test set.
                JavaPairRDD<Object, Object> predictionAndLabels = test.mapToPair(p ->
                        new Tuple2<>(model.predict(p.features()), p.label()));

                // Get evaluation metrics.
                MulticlassMetrics metrics = new MulticlassMetrics(predictionAndLabels.rdd());
                fos.write(metrics.accuracy() + "\n");
            } catch (IOException e) {
                // Best-effort reporting: preserve the original behavior of
                // logging the failure and continuing rather than crashing.
                e.printStackTrace();
            }

            // Save and load model
//            model.save(sc, "src/main/resources/data/modal/LogisticRegressionWithLBFGSModel");
//            LogisticRegressionModel sameModel = LogisticRegressionModel.load(sc,
//                    "src/main/resources/data/modal/LogisticRegressionWithLBFGSModel");
            // $example off$
        } finally {
            // Always release the SparkContext, even if loading/training throws
            // (the original skipped sc.stop() on any failure).
            sc.stop();
        }
    }
}
