package chapter14


/**
 * author: yuhui
 * description: descriptive-statistics examples (max/min, mean, stddev, median, quartiles) over a CSV dataset using Spark SQL
 * date: 2024-10-27 10:36 AM
 */


import org.apache.spark.sql.functions._
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * One observation of the dataset: four numeric features plus a string label.
 * Used as the row schema when converting the raw CSV RDD to a DataFrame.
 * Marked `final`: case classes should not be extended.
 */
final case class DataAnalysis(feature1: Double, feature2: Double, feature3: Double, feature4: Double, label: String)

/**
 * Driver for a set of descriptive-statistics examples over a four-feature CSV
 * dataset: max/min, mean, sample/population standard deviation, and median /
 * first quartile computed via `row_number` plus linear interpolation.
 *
 * Reads `BookData/DataAnalysis.csv` (relative to the working directory) and
 * prints every intermediate result with `show()`.
 */
object DataAnalysis {

  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("DataAnalysis") // was empty; name the app so it is identifiable in the Spark UI
      .master("local[*]")
      .getOrCreate()

    import spark.implicits._

    // Read the raw CSV lines as an RDD[String].
    val lines = spark.sparkContext.textFile("BookData/DataAnalysis.csv")
    // Attach a schema by mapping each line onto the case class, then convert
    // the RDD to a DataFrame. Assumes every line has 5 comma-separated fields
    // with the first four parseable as Double — malformed rows would throw.
    val dataDF: DataFrame = lines.map(line => {
      val fields = line.split(",")
      DataAnalysis(fields(0).toDouble, fields(1).toDouble, fields(2).toDouble, fields(3).toDouble, fields(4))
    }).toDF()

    // Show the full dataset.
    dataDF.show()

    // 1.1 max / min (renumbered from "2.1", which was inconsistent with the
    // 1.x numbering of every other section)
    dataDF.agg(max($"feature1") as "max_feature1",
      min($"feature2") as "min_feature2")
      .show()
    println("=====1.1 最大值、最小值====")

    // 1.2 mean
    dataDF.agg(mean($"feature1") as "mean_feature1",
      mean($"feature2") as "mean_feature2").show()

    println("=====1.2 平均值====")

    // 1.3 sample vs. population standard deviation
    // (stddev is an alias of stddev_samp; both shown next to stddev_pop)
    dataDF.agg(stddev($"feature1") as "stddev_feature1",
      stddev_pop($"feature1") as "stddev_pop_feature1",
      stddev_samp($"feature1") as "stddev_samp_feature1").show()

    println("=====1.3 样本标准差&总体标准差====")

    // Global ordering window. No partitionBy: Spark moves all rows to a single
    // partition, which is acceptable for this small demo dataset.
    val windowFun = Window.orderBy(col("feature3").asc)
    dataDF.withColumn("rank", row_number().over(windowFun)).show(false)

    // 1.4 median at position (n + 1) / 2: the integer part selects the row,
    // the fractional part drives linear interpolation with the next value.
    val median_index = dataDF.agg(
      ((count($"feature3") + 1) / 2).cast("int") as "rank",
      ((count($"feature3") + 1) / 2 % 1) as "float_part"
    )

    median_index.show()

    dataDF.withColumn("next_feature3", lead(col("feature3"), 1).over(windowFun)).show(false)

    dataDF.withColumn("rank", row_number().over(windowFun))
      .withColumn("next_feature3", lead(col("feature3"), 1).over(windowFun))
      .join(median_index, Seq("rank"), "inner")
      // interpolation: float_part * next + (1 - float_part) * current
      // (the original's "($"float_part" - lit(0))" was a redundant no-op)
      .withColumn("median", $"float_part" * $"next_feature3" + (lit(1) - $"float_part") * $"feature3")
      .show()
    println("=====1.4 中位数====")


    // 1.5 first quartile, "n + 1" convention: position (n + 1) * 0.25
    val q1_index_add = dataDF.agg(
      ((count($"feature3") + 1) * 0.25).cast("int") as "rank",
      ((count($"feature3") + 1) * 0.25 % 1) as "float_part"
    )

    dataDF.withColumn("rank", row_number().over(windowFun))
      .withColumn("next_feature3", lead(col("feature3"), 1).over(windowFun))
      .join(q1_index_add, Seq("rank"), "inner")
      .withColumn("q1", $"float_part" * $"next_feature3" + (lit(1) - $"float_part") * $"feature3")
      .show()
    println("=====1.5 四分位数 n + 1 ====")
    /** Sample output:
     * +----+--------+--------+--------+--------+-----------+-------------+----------+---+
     * |rank|feature1|feature2|feature3|feature4|      label|next_feature3|float_part| q1|
     * +----+--------+--------+--------+--------+-----------+-------------+----------+---+
     * |  37|     4.9|     3.1|     1.6|     0.1|spark-huige|          1.6|      0.75|1.6|
     * +----+--------+--------+--------+--------+-----------+-------------+----------+---+
     */

    // 1.6 first quartile, "n - 1" convention: position (n - 1) * 0.25
    val q1_index_sub = dataDF.agg(
      ((count($"feature3") - 1) * 0.25).cast("int") as "rank",
      ((count($"feature3") - 1) * 0.25 % 1) as "float_part"
    )
    dataDF.withColumn("rank", row_number().over(windowFun))
      .withColumn("next_feature3", lead(col("feature3"), 1).over(windowFun))
      .join(q1_index_sub, Seq("rank"), "inner")
      .withColumn("q1", $"float_part" * $"next_feature3" + (lit(1) - $"float_part") * $"feature3")
      .show()

    println("=====1.6 四分位数 n - 1 ====")

    /** Sample output:
     * +----+--------+--------+--------+--------+-----------+-------------+----------+---+
     * |rank|feature1|feature2|feature3|feature4|      label|next_feature3|float_part| q1|
     * +----+--------+--------+--------+--------+-----------+-------------+----------+---+
     * |  37|     4.9|     3.1|     1.6|     0.1|spark-huige|          1.6|      0.25|1.6|
     * +----+--------+--------+--------+--------+-----------+-------------+----------+---+
     */

    // Release the session's resources; the original leaked it at exit.
    spark.stop()
  }

}