package com.timeriver.feature_project

import org.apache.spark.ml.feature._
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
  * Feature-scaling (magnitude normalization) demo:
  *   1. StandardScaler: normalizes each feature to unit standard deviation
  *      (optionally zero mean).
  *   2. MinMaxScaler: rescales each feature to a range, [0, 1] by default
  *      (other bounds configurable via setMin/setMax).
  *   3. MaxAbsScaler: divides each feature by its max absolute value,
  *      mapping values into [-1, 1] (non-negative data lands in [0, 1]).
  */
object ScalerDemo {
  def main(args: Array[String]): Unit = {
    // NOTE(review): appName looks copy-pasted from a correlation demo
    // ("数据相关性计算" = "data correlation computation") — consider renaming.
    val session: SparkSession = SparkSession.builder()
      .appName("数据相关性计算")
      .master("local[6]")
      .getOrCreate()

    // Ensure the SparkSession is always released, even if a stage fails.
    try {
      // Load the Breast Cancer Wisconsin dataset over JDBC.
      // NOTE(review): credentials are hardcoded — move to config/secrets.
      // NOTE(review): "com.mysql.jdbc.Driver" is deprecated in Connector/J 8+;
      // prefer "com.mysql.cj.jdbc.Driver" once the driver jar is upgraded.
      val df: DataFrame = session.read
        .format("jdbc")
        .option("url", "jdbc:mysql://10.0.24.197:3306/ml_datasets")
        .option("driver", "com.mysql.jdbc.Driver")
        .option("dbtable", "breast_cancer_wisconsin")
        .option("user", "root")
        .option("password", "123456")
        .load()

      /** Drop rows containing any null value. */
      val value: Dataset[Row] = df.filter(!_.anyNull)

      /** Feature column names (must match the DB table's schema exactly). */
      val inputCols: Array[String] = "clump_thickness,uniformity_of_cell_size,uniformity_of_cell_shape,marginal_adhesion,single_epithelial_cell_size,bare_nuclei,blan_chromatin,normal_nucleoli,mitoses".split(",")

      /** Assemble the individual feature columns into a single vector column. */
      val data: DataFrame = new VectorAssembler()
        .setInputCols(inputCols)
        .setOutputCol("features")
        .transform(value)

      /** 1. Standardize to unit standard deviation (mean left uncentered). */
      val scaler = new StandardScaler()
        .setInputCol("features")
        .setOutputCol("scaledFeatures")
        .setWithStd(true)
        .setWithMean(false)

      val scalerModel: StandardScalerModel = scaler.fit(data)
      val frame: DataFrame = scalerModel.transform(data)
      frame.show(false)

      /** 2. Min-max scaling into the default [0, 1] range. */
      val minMaxScaler = new MinMaxScaler()
        .setInputCol("features")
        .setOutputCol("minMaxFeatures")

      val minMaxScalerModel: MinMaxScalerModel = minMaxScaler.fit(data)
      val frame2: DataFrame = minMaxScalerModel.transform(data)
      frame2.show(false)

      /** 3. Max-absolute-value scaling into [-1, 1]. */
      val maxAbsScaler: MaxAbsScaler = new MaxAbsScaler()
        .setInputCol("features")
        .setOutputCol("maxAbsFeatures")

      val maxAbsScalerModel: MaxAbsScalerModel = maxAbsScaler.fit(data)
      val frame3: DataFrame = maxAbsScalerModel.transform(data)
      frame3.show(false)
    } finally {
      // Fix: original never stopped the session, leaking the local Spark context.
      session.stop()
    }
  }
}
