package cn.lgwen.spark.ml.learning.kaggle;

import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.ml.feature.*;
import org.apache.spark.ml.linalg.Vector;
import org.apache.spark.ml.linalg.VectorUDT;
import org.apache.spark.ml.linalg.Vectors;
import org.apache.spark.ml.regression.RandomForestRegressionModel;
import org.apache.spark.ml.regression.RandomForestRegressor;
import org.apache.spark.sql.*;
import org.apache.spark.sql.catalyst.encoders.RowEncoder;
import org.apache.spark.sql.types.*;

import java.io.IOException;


/**
 * Kaggle Titanic feature-engineering job.
 *
 * <p>Pipeline stages:
 * <ol>
 *   <li>Read {@code train.csv} with an explicit 12-column schema.</li>
 *   <li>Impute missing {@code Age} values with a RandomForest regressor trained
 *       on [Fare, Parch, SibSp, Pclass] from the rows where Age is known.</li>
 *   <li>Encode categorical columns as doubles ({@link ValueToDouble}).</li>
 *   <li>MaxAbs-scale Fare/Age/Pclass/Parch and append the result as CSV.</li>
 * </ol>
 *
 * <p>Raw CSV column positions (used throughout via {@code Row.get(i)}):
 * 0 PassengerId, 1 Survived, 2 Pclass, 3 Name, 4 Sex, 5 Age,
 * 6 SibSp, 7 Parch, 8 Ticket, 9 Fare, 10 Cabin, 11 Embarked.
 *
 * 2020/3/14
 * aven.wu
 * danxieai258@163.com
 */
public class Titanic {

    public static void main(String[] args) throws IOException {
        // Paths may be overridden on the command line; defaults keep the
        // original hard-coded locations for backward compatibility.
        String trainCsv = args.length > 0 ? args[0]
                : "/Users/wxy/Develop/python_rep/data/titanic/train.csv";
        String outputDir = args.length > 1 ? args[1]
                : "/Users/wxy/Develop/python_rep/data/titanic/train_data";

        SparkSession spark = SparkSession
                .builder().master("local[*]")
                .appName("Titanic")
                .getOrCreate();
        try {
            StructType schema = new StructType(new StructField[]{
                    new StructField("PassengerId", DataTypes.IntegerType, false, Metadata.empty()),//0
                    new StructField("Survived", DataTypes.IntegerType, false, Metadata.empty()),//1
                    new StructField("Pclass", DataTypes.DoubleType, false, Metadata.empty()),//2
                    new StructField("Name", DataTypes.StringType, false, Metadata.empty()),//3
                    new StructField("Sex", DataTypes.StringType, false, Metadata.empty()),//4
                    new StructField("Age", DataTypes.DoubleType, true, Metadata.empty()),//5
                    new StructField("SibSp", DataTypes.DoubleType, false, Metadata.empty()),//6
                    new StructField("Parch", DataTypes.DoubleType, false, Metadata.empty()),//7
                    new StructField("Ticket", DataTypes.StringType, false, Metadata.empty()),//8
                    new StructField("Fare", DataTypes.DoubleType, true, Metadata.empty()),//9
                    new StructField("Cabin", DataTypes.StringType, true, Metadata.empty()),//10
                    new StructField("Embarked", DataTypes.StringType, false, Metadata.empty())//11
            });
            // Drop rows with no PassengerId (blank/malformed CSV lines).
            Dataset<Row> dataSet = spark.read().schema(schema).csv(trainCsv)
                    .filter((FilterFunction<Row>) x -> x.get(0) != null);

            Dataset<Row> knownAge = dataSet.filter((FilterFunction<Row>) value -> value.get(5) != null);
            Dataset<Row> unknownAge = dataSet.filter((FilterFunction<Row>) value -> value.get(5) == null);

            // Training frame for the Age regressor: label = Age, feature = [Fare, Parch, SibSp, Pclass].
            StructType ageComplementSchema = new StructType(new StructField[]{
                    new StructField("Age", DataTypes.DoubleType, true, Metadata.empty()),
                    new StructField("feature", new VectorUDT(), false, Metadata.empty()),
            });
            Dataset<Row> ageTrainRows = knownAge.map(
                    (MapFunction<Row, Row>) value ->
                            RowFactory.create(value.getDouble(5), ageFeature(value)),
                    RowEncoder.apply(ageComplementSchema));

            RandomForestRegressor rf = new RandomForestRegressor()
                    .setLabelCol("Age")
                    .setFeaturesCol("feature");
            RandomForestRegressionModel model = rf.fit(ageTrainRows);
            //model.write().overwrite().save("/Users/wxy/Develop/python_rep/data/titanic/modes/age_rfr");

            // Score the rows with missing Age. The encoder schema is the raw
            // schema plus the "feature" vector column — derived from `schema`
            // instead of re-declaring all twelve fields.
            Dataset<Row> res = model.transform(
                    unknownAge.map(new Transform(),
                            RowEncoder.apply(schema.add("feature", new VectorUDT(), true))));
            // registerTempTable has been deprecated since Spark 2.0.
            res.createOrReplaceTempView("unknown_age");
            // NOTE(review): the original code registered an unused UDF named "if"
            // (clashing with the Spark SQL built-in of the same name); it was dead
            // code and has been removed. The null-to-flag encoding it performed is
            // done for Cabin inside ValueToDouble.
            Dataset<Row> imputed = spark.sql("select PassengerId, Survived, Pclass, Name, Sex, prediction as Age, SibSp, Parch, Ticket, Fare, Cabin ,Embarked from unknown_age");

            // All-numeric projection produced by ValueToDouble (Name and Ticket dropped).
            StructType numericSchema = new StructType(new StructField[]{
                    new StructField("PassengerId", DataTypes.IntegerType, false, Metadata.empty()),//0
                    new StructField("Survived", DataTypes.IntegerType, false, Metadata.empty()),//1
                    new StructField("Pclass", DataTypes.DoubleType, false, Metadata.empty()),//2
                    new StructField("Sex", DataTypes.DoubleType, false, Metadata.empty()),//3
                    new StructField("Age", DataTypes.DoubleType, true, Metadata.empty()),//4
                    new StructField("SibSp", DataTypes.DoubleType, false, Metadata.empty()),//5
                    new StructField("Parch", DataTypes.DoubleType, false, Metadata.empty()),//6
                    new StructField("Fare", DataTypes.DoubleType, true, Metadata.empty()),//7
                    new StructField("Cabin", DataTypes.DoubleType, true, Metadata.empty()),//8
                    new StructField("Embarked", DataTypes.DoubleType, false, Metadata.empty())//9
            });
            // Same columns plus single-element vector copies of the four columns
            // that will be MaxAbs-scaled (scalers require vector input).
            StructType withVectorsSchema = numericSchema
                    .add("Age_Vector", new VectorUDT(), true)
                    .add("Fare_Vector", new VectorUDT(), true)
                    .add("Pclass_Vector", new VectorUDT(), true)
                    .add("Parch_Vector", new VectorUDT(), true);

            Dataset<Row> trainData = knownAge.union(imputed)
                    .map(new ValueToDouble(), RowEncoder.apply(numericSchema))
                    .map((MapFunction<Row, Row>) value -> RowFactory.create(
                            value.getInt(0),                    // PassengerId
                            value.getInt(1),                    // Survived
                            value.getDouble(2),                 // Pclass
                            value.getDouble(3),                 // Sex
                            value.getDouble(4),                 // Age
                            value.getDouble(5),                 // SibSp
                            value.getDouble(6),                 // Parch
                            value.getDouble(7),                 // Fare
                            value.getDouble(8),                 // Cabin
                            value.getDouble(9),                 // Embarked
                            Vectors.dense(value.getDouble(4)),  // Age_Vector
                            Vectors.dense(value.getDouble(7)),  // Fare_Vector
                            Vectors.dense(value.getDouble(2)),  // Pclass_Vector
                            Vectors.dense(value.getDouble(6))), // Parch_Vector
                    RowEncoder.apply(withVectorsSchema));

            // Scale each feature into [-1, 1]; one fit+transform per column.
            Dataset<Row> scaled = maxAbsScale(trainData, "Fare_Vector", "FareScaler");
            scaled = maxAbsScale(scaled, "Age_Vector", "AgeScaler");
            scaled = maxAbsScale(scaled, "Pclass_Vector", "PclassScaler");
            scaled = maxAbsScale(scaled, "Parch_Vector", "ParchScaler");

            // Flatten the single-element scaled vectors back to plain doubles.
            StructType outputSchema = new StructType(new StructField[]{
                    new StructField("Survived", DataTypes.DoubleType, true, Metadata.empty()),//0
                    new StructField("Sex", DataTypes.DoubleType, true, Metadata.empty()),//1
                    new StructField("SibSp", DataTypes.DoubleType, true, Metadata.empty()),//2
                    new StructField("Cabin", DataTypes.DoubleType, true, Metadata.empty()),//3
                    new StructField("Embarked", DataTypes.DoubleType, true, Metadata.empty()),//4
                    new StructField("Fare", DataTypes.DoubleType, true, Metadata.empty()),//5
                    new StructField("Age", DataTypes.DoubleType, true, Metadata.empty()),//6
                    new StructField("Pclass", DataTypes.DoubleType, true, Metadata.empty()),//7
                    new StructField("Parch", DataTypes.DoubleType, true, Metadata.empty()),//8
            });
            Dataset<Row> finalDataSet = scaled.select("Survived", "Sex", "SibSp", "Cabin", "Embarked",
                    "FareScaler", "AgeScaler", "PclassScaler", "ParchScaler")
                    .map((MapFunction<Row, Row>) value -> RowFactory.create(
                            (double) value.getInt(0),
                            value.getDouble(1), value.getDouble(2),
                            value.getDouble(3), value.getDouble(4),
                            ((Vector) value.get(5)).apply(0),
                            ((Vector) value.get(6)).apply(0),
                            ((Vector) value.get(7)).apply(0),
                            ((Vector) value.get(8)).apply(0)),
                            RowEncoder.apply(outputSchema));

            // inferSchema is a read option and was a no-op on write; removed.
            finalDataSet.write().mode(SaveMode.Append).option("header", true)
                    .csv(outputDir);
            /*String featureFields[] = new String[]{"Sex", "SibSp", "Cabin", "Embarked",
                    "Fare", "Age", "Pclass", "Parch"};
            VectorAssembler vectorAssem4 = new VectorAssembler()
                    .setInputCols(featureFields).
                            setOutputCol("features").setHandleInvalid("keep");
            trainFareData = vectorAssem4.transform(finalDataSet);

            LogisticRegression lr = new LogisticRegression()
                    .setLabelCol("Survived").setMaxIter(10)
                    .setRegParam(0.3).setElasticNetParam(0.8)
                    .setFeaturesCol("features");
            LogisticRegressionModel lrModel = lr.fit(trainFareData);

            System.out.println("Coefficients: "
                    + lrModel.coefficients() + " Intercept: " + lrModel.intercept());

            Dataset<Row> testData = TitanicTestData.testData(spark);
            Dataset<Row> result = lrModel.transform(testData);
            result.select("PassengerId", "prediction", "features").show(30);*/
        } finally {
            spark.stop(); // release the local Spark context even on failure
        }
    }

    /**
     * Builds the feature vector used by the Age regressor from a raw row:
     * [Fare(9), Parch(7), SibSp(6), Pclass(2)]. Shared by training
     * ({@code knownAge}) and scoring ({@link Transform}) so the two cannot drift.
     */
    private static Vector ageFeature(Row value) {
        return Vectors.dense(
                value.getDouble(9),
                value.getDouble(7),
                value.getDouble(6),
                value.getDouble(2));
    }

    /**
     * Fits a {@link MaxAbsScaler} on {@code inputCol} of {@code df} and returns
     * the dataset with the scaled column appended as {@code outputCol}.
     */
    private static Dataset<Row> maxAbsScale(Dataset<Row> df, String inputCol, String outputCol) {
        MaxAbsScaler scaler = new MaxAbsScaler()
                .setInputCol(inputCol)
                .setOutputCol(outputCol);
        MaxAbsScalerModel scalerModel = scaler.fit(df);
        return scalerModel.transform(df);
    }

    /**
     * Projects a raw 12-column row onto 10 all-numeric columns, dropping Name
     * and Ticket. Encodings: Sex female=0/male=1; Cabin missing=0/present=1;
     * Embarked S=1, C=2, anything else (incl. Q) = 0.
     */
    public static class ValueToDouble implements MapFunction<Row, Row> {
        @Override
        public Row call(Row value) throws Exception {
            Object port = value.get(11);
            double embarked = 0.;
            if ("S".equals(port)) {
                embarked = 1.;
            } else if ("C".equals(port)) {
                embarked = 2.;
            }

            return RowFactory.create(
                    value.get(0),                            // PassengerId (Integer)
                    value.get(1),                            // Survived (Integer)
                    value.getDouble(2),                      // Pclass
                    "female".equals(value.get(4)) ? 0. : 1., // Sex
                    value.getDouble(5),                      // Age
                    value.getDouble(6),                      // SibSp
                    value.getDouble(7),                      // Parch
                    value.getDouble(9),                      // Fare
                    value.get(10) == null ? 0. : 1.,         // Cabin presence flag
                    embarked);                               // Embarked
        }
    }

    /**
     * Appends the Age-regressor feature vector (see {@link #ageFeature(Row)})
     * as a 13th column, keeping the original twelve columns untouched.
     */
    public static class Transform implements MapFunction<Row, Row> {

        @Override
        public Row call(Row value) throws Exception {
            Object[] fields = new Object[13];
            for (int i = 0; i < 12; i++) {
                fields[i] = value.get(i);
            }
            fields[12] = ageFeature(value);
            return RowFactory.create(fields);
        }
    }
}
