package com.shujia.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo05Burks {

  /**
   * Demo: unpivot the 12 monthly columns (tsl01..tsl12) of a branch/year
   * table into rows, then compute two window aggregations — first with
   * Spark SQL, then again with the DataFrame DSL:
   *
   *  1. running total of `amount` per (burk, year), ordered by month;
   *  2. year-over-year growth rate of `amount` per (burk, month).
   *
   * Reads `spark/data/sql/burks.txt` (CSV, no header); results are printed
   * via `show()` (the SQL variants' `show` calls are left commented out as
   * toggles for the demo).
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo05Burks")
      .master("local")
      // After a shuffle, Spark SQL defaults to 200 partitions, which would
      // create far too many tasks for this small local demo — cap it at 2.
      .config("spark.sql.shuffle.partitions", "2")
      .getOrCreate()

    try {
      val burkDF: DataFrame = spark
        .read
        .format("csv")
        .option("sep", ",")
        .schema("burk String,year Int,tsl01 Int,tsl02 Int,tsl03 Int,tsl04 Int,tsl05 Int,tsl06 Int,tsl07 Int,tsl08 Int,tsl09 Int,tsl10 Int,tsl11 Int,tsl12 Int")
        .load("spark/data/sql/burks.txt")

      burkDF.createOrReplaceTempView("burk_tb")

      // SQL approach #1: posexplode turns the 12 monthly columns into
      // (index, amount) rows; index is 0-based, hence `index + 1 as month`.
      // The outer query computes a running total per (burk, year).
      spark
        .sql(
          """
            |select  t1.burk
            |        ,t1.year
            |        ,t1.month
            |        ,t1.amount
            |        ,sum(t1.amount) over (partition by t1.burk,t1.year order by t1.month) as sum_amount
            |from (
            |    select  burk
            |            ,year
            |            ,index + 1 as month
            |            ,amount
            |    from burk_tb
            |    lateral view posexplode(array(tsl01,tsl02,tsl03,tsl04,tsl05,tsl06,tsl07,tsl08,tsl09,tsl10,tsl11,tsl12)) v as index,amount
            |) t1
            |""".stripMargin)
      //      .show(1000)

      // SQL approach #2: lag() fetches the same month's amount from the
      // previous year; `incr` is the relative growth, 0 when there is no
      // prior year to compare against.
      spark
        .sql(
          """
            |select  tt1.burk
            |        ,tt1.year
            |        ,tt1.month
            |        ,tt1.amount
            |        ,tt1.last_amount
            |        ,round(if(tt1.last_amount is null,0,tt1.amount/tt1.last_amount - 1),5) as incr
            |from (
            |    select  t1.burk
            |            ,t1.year
            |            ,t1.month
            |            ,t1.amount
            |            ,lag(t1.amount,1) over (partition by t1.burk,t1.month order by t1.year) as last_amount
            |    from (
            |        select  burk
            |                ,year
            |                ,index + 1 as month
            |                ,amount
            |        from burk_tb
            |        lateral view posexplode(array(tsl01,tsl02,tsl03,tsl04,tsl05,tsl06,tsl07,tsl08,tsl09,tsl10,tsl11,tsl12)) v as index,amount
            |    ) t1
            |) tt1
            |""".stripMargin)
      //      .show()

      import spark.implicits._
      import org.apache.spark.sql.functions._

      // DSL approach: same unpivot as the SQL subquery — posexplode yields
      // a 0-based index plus the value, so shift the index to a 1-based month.
      val hangToLieDF: DataFrame = burkDF
        .select($"burk", $"year", posexplode(array("tsl01", "tsl02", "tsl03", "tsl04", "tsl05", "tsl06", "tsl07", "tsl08", "tsl09", "tsl10", "tsl11", "tsl12")) as Array("month", "amount"))
        .select($"burk", $"year", $"month" + 1 as "month", $"amount")

      // Both window computations below reuse this DataFrame; cache it so the
      // source file is read and exploded only once.
      hangToLieDF.cache()

      // Running total per (burk, year), ordered by month.
      hangToLieDF
        .withColumn("sum_amount", sum($"amount") over Window.partitionBy($"burk", $"year").orderBy($"month"))
        .show()

      // Year-over-year growth per (burk, month); 0 when no previous year exists.
      hangToLieDF
        .withColumn("last_amount", lag($"amount", 1) over Window.partitionBy($"burk", $"month").orderBy($"year"))
        .withColumn("incr", when($"last_amount".isNull, 0).otherwise(round(($"amount" - $"last_amount") / $"last_amount", 5)))
        .show()

      // Release the cached partitions once we're done with them.
      hangToLieDF.unpersist()
    } finally {
      // Always shut down the local SparkContext, even if a query fails.
      spark.stop()
    }
  }

}
