package com.zyx.sparkdemo.mllib.featurescaler

import org.apache.spark.SparkConf
import org.apache.spark.ml.feature.{MaxAbsScaler, Normalizer}
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession

/**
 * @author Yaxi.Zhang
 * @since 2021/8/27 16:56
 *        reference: https://blog.csdn.net/neilron/article/details/75329973
 */
/**
 * Demonstrates Spark ML's `MaxAbsScaler`.
 *
 * MaxAbsScaler rescales each feature dimension to the closed interval [-1, 1]
 * by dividing by the maximum absolute value observed in that dimension. It
 * does not shift/center the data, so sparsity of the feature vectors is
 * preserved.
 */
object MaxAbsScalerDemo {
  def main(args: Array[String]): Unit = {

    // BUG FIX: appName was "NormalizerDemo" (copy-paste leftover); it now
    // matches this object so the job is identifiable in the Spark UI.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("MaxAbsScalerDemo")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    // Small sample dataset: (id, raw feature vector).
    val dataFrame = spark.createDataFrame(Seq(
      (0, Vectors.dense(1.0, 0.5, -1.0)),
      (1, Vectors.dense(2.0, 1.0, 1.0)),
      (2, Vectors.dense(4.0, 10.0, 2.0))
    )).toDF("id", "features")

    // Fit the scaler: it computes, per dimension, the max absolute value
    // over the input column, then transform divides each vector by it.
    val scaler = new MaxAbsScaler().setInputCol("features").setOutputCol("scaledFeatures")
    val scalerModel = scaler.fit(dataFrame)

    val scaledData = scalerModel.transform(dataFrame)

    scaledData.show
    /*
      Per-dimension max absolute values are [4, 10, 2], so the output is:
        +---+--------------+----------------+
        | id|      features|  scaledFeatures|
        +---+--------------+----------------+
        |  0|[1.0,0.5,-1.0]|[0.25,0.05,-0.5]|
        |  1| [2.0,1.0,1.0]|   [0.5,0.1,0.5]|
        |  2|[4.0,10.0,2.0]|   [1.0,1.0,1.0]|
        +---+--------------+----------------+
     */

    // Release the SparkSession and its underlying SparkContext.
    spark.close()
  }
}
