package com.itcast.spark.baseFeaturation

import org.apache.spark.ml.feature._
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * DESC: Demonstrates chi-square feature selection (ChiSqSelector).
 *
 * Pipeline: assemble the raw columns into a single vector column with
 * [[VectorAssembler]], then use [[ChiSqSelector]] to keep the features
 * most correlated (by chi-square test) with the label column.
 */
object _05ChiaquarerDataTest {
  def main(args: Array[String]): Unit = {
    // Set up the Spark environment (local mode, all cores).
    val conf: SparkConf = new SparkConf().setAppName("_05ChiaquarerDataTest").setMaster("local[*]")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")
    // Sample data: id, hour, mobile, a precomputed feature vector, and the label "clicked".
    val dataset = spark.createDataFrame(
      Seq((0, 18, 1.0, Vectors.dense(0.0, 10.0, 0.5), 1.0))
    ).toDF("id", "hour", "mobile", "userFeatures", "clicked")
    // Goal: use the chi-square test to select the x features most related to y (clicked).
    // ChiSqSelector requires its input features as a single Vector column,
    // so first assemble the scalar and vector columns into "features".
    val assembler: VectorAssembler = new VectorAssembler().setInputCols(Array("hour", "mobile", "userFeatures")).setOutputCol("features")
    val result: DataFrame = assembler.transform(dataset)
    // Chi-square selection: keep the top 2 features most predictive of "clicked".
    val selector: ChiSqSelector = new ChiSqSelector().setFeaturesCol("features").setLabelCol("clicked").setNumTopFeatures(2)
    val selectorModel: ChiSqSelectorModel = selector.fit(result)
    selectorModel.transform(result).show()
    // Release Spark resources before exiting.
    spark.stop()
  }
}
