package com.zyx.sparkdemo.mllib.featurescaler

import org.apache.spark.SparkConf
import org.apache.spark.ml.feature.MinMaxScaler
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession

/**
 * @author Yaxi.Zhang
 * @since 2021/8/27 15:26
 *        reference: https://blog.csdn.net/neilron/article/details/75329973
 */
object MinMaxScalarDemo {

  /**
   * Demonstrates Spark ML's MinMaxScaler: fits summary statistics on a tiny
   * DataFrame of dense vectors, then rescales each feature dimension linearly
   * into the scaler's target range (default [0, 1]).
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("MinMaxScalarDemo")
    val spark = SparkSession.builder().config(conf).getOrCreate()

    // Three sample rows; each "features" vector carries three dimensions.
    val samples = Seq(
      (0, Vectors.dense(1.0, 0.5, -1.0)),
      (1, Vectors.dense(2.0, 1.0, 1.0)),
      (2, Vectors.dense(4.0, 10.0, 2.0))
    )
    val df = spark.createDataFrame(samples).toDF("id", "features")

    // MinMaxScaler operates column-wise: every feature dimension is mapped
    // linearly onto a target interval, [0, 1] by default. Two tunable params:
    //   - min: lower bound of the target range (default 0)
    //   - max: upper bound of the target range (default 1)
    val scaler = new MinMaxScaler()
      .setInputCol("features")
      .setOutputCol("scaledFeatures")

    // fit() computes per-column min/max statistics, producing a MinMaxScalerModel.
    val model = scaler.fit(df)
    // transform() rescales each feature into [min, max].
    val rescaled = model.transform(df)

    println(s"Features scaled to range: [${scaler.getMin}, ${scaler.getMax}]")
    rescaled.show(truncate = false)
    /*
        Each dimension is mapped linearly: its minimum goes to 0, its maximum to 1.
        +---+--------------+-----------------------------------------------------------+
        |id |features      |scaledFeatures                                             |
        +---+--------------+-----------------------------------------------------------+
        |0  |[1.0,0.5,-1.0]|[0.0,0.0,0.0]                                              |
        |1  |[2.0,1.0,1.0] |[0.3333333333333333,0.05263157894736842,0.6666666666666666]|
        |2  |[4.0,10.0,2.0]|[1.0,1.0,1.0]                                              |
        +---+--------------+-----------------------------------------------------------+
     */

    spark.close()
  }
}
