import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.ml.feature.StandardScaler;
import org.apache.spark.ml.feature.StandardScalerModel;
import org.apache.spark.ml.linalg.VectorUDT;
import org.apache.spark.ml.linalg.Vectors;
import org.apache.spark.ml.linalg.DenseVector;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructType;

import java.util.Arrays;
import java.util.List;

/**
 * Demo: standardize a small set of dense feature vectors with Spark ML's
 * {@link StandardScaler} (zero mean, unit variance per column) and print the result.
 *
 * Runs locally ({@code local[*]}); intended as a runnable example, not a library class.
 */
public class Tset2 {
    public static void main(String[] args) {
        // Spark configuration for a local run using all available cores.
        SparkConf sparkConf = new SparkConf().setAppName("StandardScalerExample").setMaster("local[*]");
        // NOTE: the SparkSession owns its own SparkContext; creating a separate
        // JavaSparkContext here (as the original did) was redundant and leaked.
        SparkSession sparkSession = SparkSession.builder().config(sparkConf).getOrCreate();

        try {
            // Input data: each row holds a single 5-dimensional dense feature vector.
            List<Row> rowData = Arrays.asList(
                    RowFactory.create(Vectors.dense(0.961639043, 210, 88, 580717, 1.0)),
                    RowFactory.create(Vectors.dense(1.25231444, 140, 85, 293678, 7.0)),
                    RowFactory.create(Vectors.dense(1.254675516, 135, 85, 283712, 11.0)),
                    RowFactory.create(Vectors.dense(1.090869565, 23, 67, 281336, 97.0)),
                    RowFactory.create(Vectors.dense(0.970657895, 152, 59, 309928, 5.0)),
                    RowFactory.create(Vectors.dense(0.967692483, 92, 73, 294585, 79.0)),
                    RowFactory.create(Vectors.dense(0.965346535, 101, 96, 287042, 1.0)),
                    RowFactory.create(Vectors.dense(0.962070222, 73, 47, 287230, 3.0)),
                    RowFactory.create(Vectors.dense(0.828478237, 56, 33, 321489, 6.0)),
                    RowFactory.create(Vectors.dense(0.708010153, 64, 44, 375074, 15.0))
            );

            // Build a DataFrame with a single vector-typed "features" column.
            Dataset<Row> dataFrame = sparkSession.createDataFrame(rowData, new StructType()
                    .add("features", new VectorUDT()));

            // Configure the scaler: subtract the column mean and divide by the
            // column standard deviation, writing results to "scaledFeatures".
            StandardScaler standardScaler = new StandardScaler()
                    .setInputCol("features")
                    .setOutputCol("scaledFeatures")
                    .setWithStd(true)
                    .setWithMean(true);

            // Fit computes per-column mean/std; transform applies the scaling.
            StandardScalerModel scalerModel = standardScaler.fit(dataFrame);
            Dataset<Row> scaledDataFrame = scalerModel.transform(dataFrame);

            // Print the full (untruncated) scaled output.
            scaledDataFrame.show(false);
        } finally {
            // Release Spark resources even if the pipeline above throws.
            sparkSession.stop();
        }
    }
}
