package com.study.feature

import org.apache.spark.ml.feature.{Binarizer, MaxAbsScaler, MinMaxScaler, Normalizer, StandardScaler}
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession

/**
 * Feature scaling / normalization demo.
 *
 * 1. Normalization (row-wise):
 *      1) Normalizer: rescales each row vector to have unit norm (for a chosen p-norm).
 * 2. Standardization / scaling (column-wise, i.e. per feature):
 *      1) StandardScaler: scales each column to unit standard deviation (optionally zero mean).
 *      2) MinMaxScaler: linearly maps each feature into a given range, typically [0, 1].
 *      3) MaxAbsScaler: maps each feature into [-1, 1] by dividing by the max absolute value.
 * 3. Binarization:
 *      1) Binarizer: thresholds values to 0.0 / 1.0.
 *
 * @author stephen
 * @date 2019-08-26 11:06
 */
object FeatureScalerDemo {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("FeatureScalerDemo")
      .master("local[*]")
      .getOrCreate()
    //val path = FeatureScalerDemo.getClass.getClassLoader.getResource("sample_libsvm_data.txt").getPath
    //val data = spark.read.format("libsvm").load(path)
    val data = spark.createDataFrame(Seq(
      (0, Vectors.dense(1.0, 0.5, -1.0)),
      (1, Vectors.dense(2.0, 1.0, 1.0)),
      (2, Vectors.dense(4.0, 10.0, 2.0))
    )).toDF("id", "features")

    // Normalizer operates row-wise: each row vector is rescaled to unit norm.
    // Here p = 1, i.e. each row is divided by its L^1 norm
    // (the sum of the absolute values of its components).
    val normalizer = new Normalizer()
      .setInputCol("features")
      .setOutputCol("normFeatures")
      .setP(1.0)
    val l1NormData = normalizer.transform(data)
    println("Normalized using L^1 norm")
    l1NormData.show(false)
    // Each row divided by its L^1 norm (2.5, 4.0 and 16.0 respectively):
    //  +---+--------------+------------------+
    //  | id|      features|      normFeatures|
    //  +---+--------------+------------------+
    //  |  0|[1.0,0.5,-1.0]|    [0.4,0.2,-0.4]|
    //  |  1| [2.0,1.0,1.0]|   [0.5,0.25,0.25]|
    //  |  2|[4.0,10.0,2.0]|[0.25,0.625,0.125]|
    //  +---+--------------+------------------+

    // Normalize each row to unit L^inf norm: the `p` param is overridden
    // per-call, so each row is divided by its maximum absolute value.
    val lInfNormData = normalizer.transform(data, normalizer.p -> Double.PositiveInfinity)
    println("Normalized using L^inf norm")
    lInfNormData.show(false)
    // Each row divided by its max absolute value (1.0, 2.0 and 10.0 respectively):
    //  +---+--------------+--------------+
    //  | id|      features|  normFeatures|
    //  +---+--------------+--------------+
    //  |  0|[1.0,0.5,-1.0]|[1.0,0.5,-1.0]|
    //  |  1| [2.0,1.0,1.0]| [1.0,0.5,0.5]|
    //  |  2|[4.0,10.0,2.0]| [0.4,1.0,0.2]|
    //  +---+--------------+--------------+

    // StandardScaler operates column-wise (per feature dimension):
    // it standardizes each feature to unit standard deviation and/or zero mean.
    val standardScaler = new StandardScaler()
      .setInputCol("features")
      .setOutputCol("scaledFeatures")
      // withStd: defaults to true — scale to unit standard deviation.
      .setWithStd(true)
      // withMean: defaults to false — whether to center to zero mean
      // (centering densifies sparse vectors, hence off by default).
      .setWithMean(false)
    // Compute summary statistics by fitting the StandardScaler.
    val standardScalerModel = standardScaler.fit(data)
    // Normalize each feature to have unit standard deviation.
    val standardScaledData = standardScalerModel.transform(data)
    standardScaledData.show(false)
    // Each column scaled to unit (sample) standard deviation:
    //  +---+--------------+------------------------------------------------------------+
    //  |id |features      |scaledFeatures                                              |
    //  +---+--------------+------------------------------------------------------------+
    //  |0  |[1.0,0.5,-1.0]|[0.6546536707079772,0.09352195295828244,-0.6546536707079771]|
    //  |1  |[2.0,1.0,1.0] |[1.3093073414159544,0.1870439059165649,0.6546536707079771]  |
    //  |2  |[4.0,10.0,2.0]|[2.618614682831909,1.870439059165649,1.3093073414159542]    |
    //  +---+--------------+------------------------------------------------------------+

    // MinMaxScaler also operates column-wise: each feature is linearly
    // mapped into the target range, typically [0, 1].
    val minMaxScaler = new MinMaxScaler()
      .setInputCol("features")
      .setOutputCol("scaledFeatures")
      // max: defaults to 1 — upper bound of the target range.
      .setMax(1)
      // min: defaults to 0 — lower bound of the target range.
      .setMin(0)
    // Compute summary statistics and generate MinMaxScalerModel
    val minMaxScalerModel = minMaxScaler.fit(data)
    // rescale each feature to range [min, max].
    val minMaxScaledData = minMaxScalerModel.transform(data)
    println(s"Features scaled to range: [${minMaxScaler.getMin}, ${minMaxScaler.getMax}]")
    minMaxScaledData.select("features", "scaledFeatures").show(false)
    // Per-feature linear mapping: the column minimum maps to 0, the maximum to 1.
    //  +--------------+-----------------------------------------------------------+
    //  |features      |scaledFeatures                                             |
    //  +--------------+-----------------------------------------------------------+
    //  |[1.0,0.5,-1.0]|[0.0,0.0,0.0]                                              |
    //  |[2.0,1.0,1.0] |[0.3333333333333333,0.05263157894736842,0.6666666666666666]|
    //  |[4.0,10.0,2.0]|[1.0,1.0,1.0]                                              |
    //  +--------------+-----------------------------------------------------------+

    // MaxAbsScaler maps each feature into the closed interval [-1, 1]
    // by dividing by the feature's maximum absolute value. It does not
    // shift the distribution, so sparsity of the vectors is preserved.
    val maxAbsScaler = new MaxAbsScaler()
      .setInputCol("features")
      .setOutputCol("scaledFeatures")
    // Compute summary statistics and generate MaxAbsScalerModel
    val maxAbsScalerModel = maxAbsScaler.fit(data)
    // rescale each feature to range [-1, 1]
    val maxAbsScaledData = maxAbsScalerModel.transform(data)
    maxAbsScaledData.select("features", "scaledFeatures").show(false)
    // Per-feature max absolute values are [4, 10, 2]:
    //  +--------------+----------------+
    //  |      features|  scaledFeatures|
    //  +--------------+----------------+
    //  |[1.0,0.5,-1.0]|[0.25,0.05,-0.5]|
    //  | [2.0,1.0,1.0]|   [0.5,0.1,0.5]|
    //  |[4.0,10.0,2.0]|   [1.0,1.0,1.0]|
    //  +--------------+----------------+

    // Binarizer splits values around a threshold: values strictly greater
    // than the threshold become 1.0, all others become 0.0.
    // (The output column is named "scaledFeatures" here only for consistency
    // with the examples above — binarization is not a scaling operation.)
    val binarizer = new Binarizer()
      .setInputCol("features")
      .setOutputCol("scaledFeatures")
      // The binarization threshold.
      .setThreshold(0.8)
    val binarizedData = binarizer.transform(data)
    binarizedData.select("features", "scaledFeatures").show(false)
    //  +--------------+--------------+
    //  |features      |scaledFeatures|
    //  +--------------+--------------+
    //  |[1.0,0.5,-1.0]|[1.0,0.0,0.0] |
    //  |[2.0,1.0,1.0] |[1.0,1.0,1.0] |
    //  |[4.0,10.0,2.0]|[1.0,1.0,1.0] |
    //  +--------------+--------------+

    // Release the SparkSession and its underlying SparkContext.
    spark.stop()
  }
}
