package cn.lgwen.spark.ml.learning.kaggle;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.ml.feature.MaxAbsScaler;
import org.apache.spark.ml.feature.MaxAbsScalerModel;
import org.apache.spark.ml.feature.VectorAssembler;
import org.apache.spark.ml.linalg.Vector;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.encoders.RowEncoder;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

/**
 * 2020/3/15
 * aven.wu
 * danxieai258@163.com
 */
/**
 * Prepares the Kaggle Titanic *test* split for scoring: reads the raw CSV,
 * numerically encodes categorical columns (Sex, Cabin presence, Embarked),
 * max-abs scales the numeric features Fare/Age/Pclass/Parch, writes the
 * flattened feature table to CSV, and returns a dataset carrying an assembled
 * "features" vector column plus "PassengerId".
 */
public class TitanicTestData {

    /** Numeric feature columns that are vectorized and max-abs scaled, in order. */
    private static final String[] SCALED_COLUMNS = {"Fare", "Age", "Pclass", "Parch"};

    public static void main(String[] args) {
        SparkSession spark = SparkSession
                .builder().master("local[*]")
                .appName("titanicTestData")
                .getOrCreate();
        testData(spark);
    }

    /**
     * Builds the scored-feature dataset from the raw Titanic test CSV.
     *
     * @param spark active session used for reading and transforming the data
     * @return dataset containing the input columns plus an assembled
     *         {@code features} vector column (see {@code SCALED_COLUMNS} and the
     *         encoded categorical columns)
     */
    public static Dataset<Row> testData(SparkSession spark) {
        // NOTE(review): hard-coded local path — consider passing it in or
        // reading it from configuration.
        Dataset<Row> testData = spark.read().format("csv")
                .option("header", true)
                .option("inferSchema", true)
                .load("/Users/wxy/Develop/python_rep/data/titanic/test.csv");

        // Encode raw CSV rows: Sex -> 0/1, Cabin -> presence flag, Embarked -> {0,1,2}.
        testData = testData.map((MapFunction<Row, Row>) TitanicTestData::encodeRawRow,
                RowEncoder.apply(new StructType(new StructField[]{
                        intField("PassengerId"),    // raw col 0
                        intField("Pclass"),         // raw col 1
                        doubleField("Sex"),         // raw col 3, encoded
                        doubleField("Age"),         // raw col 4 (may be null)
                        intField("SibSp"),          // raw col 5
                        intField("Parch"),          // raw col 6
                        doubleField("Fare"),        // raw col 8 (may be null)
                        doubleField("Cabin"),       // raw col 9, presence flag
                        doubleField("Embarked")     // raw col 10, encoded
                })));

        // Vectorize + max-abs scale each numeric feature column in turn,
        // producing "<col>_Vector" and "<col>Scaler" columns.
        Dataset<Row> scaled = testData;
        for (String column : SCALED_COLUMNS) {
            scaled = scaleColumn(scaled, column);
        }

        // Unwrap the single-element scaled vectors back into plain doubles so the
        // result can be written as CSV.
        scaled = scaled.select("PassengerId", "Sex", "SibSp", "Cabin", "Embarked",
                "FareScaler", "AgeScaler", "PclassScaler", "ParchScaler")
                .map((MapFunction<Row, Row>) TitanicTestData::flattenScaledRow,
                        RowEncoder.apply(new StructType(new StructField[]{
                                intField("PassengerId"),
                                doubleField("Sex"),
                                intField("SibSp"),
                                doubleField("Cabin"),
                                doubleField("Embarked"),
                                doubleField("Fare"),
                                doubleField("Age"),
                                doubleField("Pclass"),
                                doubleField("Parch")
                        })));

        // Persist the flattened features. "append" accumulates output across runs;
        // switch to "overwrite" if each run should replace the previous result.
        scaled.write().mode("append").option("header", true)
                .csv("/Users/wxy/Develop/python_rep/data/titanic/test_data");

        // Assemble every model input into a single "features" vector column.
        Dataset<Row> result = new VectorAssembler()
                .setInputCols(new String[]{"Sex", "SibSp", "Cabin", "Embarked",
                        "Fare", "Age", "Pclass", "Parch"})
                .setOutputCol("features")
                .setHandleInvalid("keep")
                .transform(scaled);

        result.select("PassengerId", "features").show(false);

        return result;
    }

    /**
     * Maps one raw CSV row to the numeric schema. Index positions refer to the
     * columns of test.csv as inferred by the reader.
     */
    private static Row encodeRawRow(Row value) {
        // Embarked (col 10): S -> 1, C -> 2, anything else (incl. null) -> 0.
        double embarked = 0.;
        if ("S".equals(value.get(10))) {
            embarked = 1.;
        } else if ("C".equals(value.get(10))) {
            embarked = 2.;
        }
        return RowFactory.create(
                value.get(0),                               // PassengerId
                value.get(1),                               // Pclass
                "female".equals(value.get(3)) ? 0. : 1.,    // Sex: female=0, male=1
                value.get(4),                               // Age (nullable)
                value.get(5),                               // SibSp
                value.get(6),                               // Parch
                value.get(8),                               // Fare (nullable)
                value.get(9) == null ? 0. : 1.,             // Cabin: 1 when present
                embarked);
    }

    /** Unwraps each single-element scaled vector column into a plain double. */
    private static Row flattenScaledRow(Row value) {
        return RowFactory.create(
                value.get(0), value.get(1), value.get(2),
                value.get(3), value.get(4),
                ((Vector) value.get(5)).toArray()[0],
                ((Vector) value.get(6)).toArray()[0],
                ((Vector) value.get(7)).toArray()[0],
                ((Vector) value.get(8)).toArray()[0]);
    }

    /**
     * Assembles {@code column} into a one-element vector "{column}_Vector" and
     * max-abs scales it into "{column}Scaler".
     */
    private static Dataset<Row> scaleColumn(Dataset<Row> data, String column) {
        Dataset<Row> assembled = new VectorAssembler()
                .setInputCols(new String[]{column})
                .setOutputCol(column + "_Vector")
                .setHandleInvalid("keep")
                .transform(data);
        MaxAbsScalerModel model = new MaxAbsScaler()
                .setInputCol(column + "_Vector")
                .setOutputCol(column + "Scaler")
                .fit(assembled);
        return model.transform(assembled);
    }

    /** Nullable integer StructField with empty metadata. */
    private static StructField intField(String name) {
        return new StructField(name, DataTypes.IntegerType, true, Metadata.empty());
    }

    /** Nullable double StructField with empty metadata. */
    private static StructField doubleField(String name) {
        return new StructField(name, DataTypes.DoubleType, true, Metadata.empty());
    }
}
