package com.shujia.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{Column, DataFrame, SparkSession}

object Demo07Burks {

  /**
   * Demo over a wide-format monthly-amount file: one row per (company, year)
   * with twelve month columns tsl01..tsl12. Shows two analyses, each written
   * both in Spark SQL and in the DataFrame DSL:
   *
   *   1. Cumulative amount per company within a year (running sum by month).
   *   2. Year-over-year growth rate for each company/month
   *      (current amount vs. the same month of the previous year).
   *
   * Both styles first unpivot the wide table into long format
   * (burk, year, month, amount) via `explode(map(...))`.
   */
  def main(args: Array[String]): Unit = {
    // Create the SparkSession (local mode; shuffle partitions lowered from
    // the default 200 to 1 because this is a tiny local demo dataset).
    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo07Burks")
      .master("local")
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Read burks.txt: comma-separated, no header, schema supplied explicitly.
    val burksDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",")
      .schema("burk String,year String,tsl01 String,tsl02 String,tsl03 String,tsl04 String,tsl05 String,tsl06 String,tsl07 String,tsl08 String,tsl09 String,tsl10 String,tsl11 String,tsl12 String")
      .load("Spark/data/burks.txt")

    // Cache: the source is scanned by several independent queries below.
    burksDF.cache()
    burksDF.createOrReplaceTempView("burks")
    //    burksDF.show()

    // 1. Cumulative amount per company within each year — SQL style.
    // `lateral view explode(map(...))` unpivots the 12 month columns into
    // (month, amount) rows; the window sum ordered by month is the running total.
    spark.sql(
      """
        |SELECT  t1.burk
        |        ,t1.year
        |        ,t1.month
        |        ,t1.amount
        |        ,sum(t1.amount) over(partition by t1.burk,t1.year order by t1.month) as sum_amount
        |from(
        |        SELECT  burk
        |                ,year
        |                ,month
        |                ,amount
        |        from burks lateral view explode(map(1,tsl01,2,tsl02,3,tsl03,4,tsl04,5,tsl05,6,tsl06,7,tsl07,8,tsl08,9,tsl09,10,tsl10,11,tsl11,12,tsl12)) v1 as month,amount
        |) t1
        |""".stripMargin)
    //      .show(50)

    // Map column month-number -> month-amount, used by the DSL unpivot below.
    // `lit(n)` builds the integer key literal directly (same result as expr("n")).
    val monthAmountMap: Column = map(
      lit(1), $"tsl01"
      , lit(2), $"tsl02"
      , lit(3), $"tsl03"
      , lit(4), $"tsl04"
      , lit(5), $"tsl05"
      , lit(6), $"tsl06"
      , lit(7), $"tsl07"
      , lit(8), $"tsl08"
      , lit(9), $"tsl09"
      , lit(10), $"tsl10"
      , lit(11), $"tsl11"
      , lit(12), $"tsl12"
    )

    // Long-format view (burk, year, month, amount), shared by both DSL queries
    // below — previously this select was duplicated verbatim in each pipeline.
    val longDF: DataFrame = burksDF
      .select($"burk"
        , $"year"
        , explode(monthAmountMap) as Array("month", "amount"))

    // 1. Cumulative amount per company within each year — DSL style.
    longDF
      .select($"burk", $"year", $"month", $"amount",
        sum($"amount") over Window.partitionBy($"burk", $"year").orderBy($"month") as "sum_amount")
    //      .show()

    // 2. Year-over-year growth rate per company/month — SQL style.
    // lag() over (partition by burk,month order by year) fetches the same
    // month's amount from the previous year; growth = amount/last_amount - 1.
    spark.sql(
      """
        |select  tt1.burk
        |        ,tt1.year
        |        ,tt1.month
        |        ,tt1.amount
        |        ,tt1.last_amount
        |        ,round(tt1.amount / tt1.last_amount,8) -1 as incr
        |from (
        |        SELECT  t1.burk
        |                ,t1.year
        |                ,t1.month
        |                ,t1.amount
        |                ,lag(t1.amount,1) over(partition by burk,month  order by year ) as last_amount
        |        from (
        |                SELECT  burk
        |                        ,year
        |                        ,month
        |                        ,amount
        |                from burks lateral view explode(map(1,tsl01,2,tsl02,3,tsl03,4,tsl04,5,tsl05,6,tsl06,7,tsl07,8,tsl08,9,tsl09,10,tsl10,11,tsl11,12,tsl12)) v1 as month,amount
        |        ) t1
        |) tt1
        |""".stripMargin)
      .show()

    // 2. Year-over-year growth rate per company/month — DSL style.
    longDF
      .select($"burk", $"year", $"month", $"amount",
        lag($"amount", 1) over Window.partitionBy($"burk", $"month").orderBy($"year") as "last_amount")
      .select($"burk", $"year", $"month", $"amount", $"last_amount", round($"amount" / $"last_amount", 8) - 1 as "incr")
      .show()

    // Release the cached data and shut the session down cleanly
    // (previously the session was never stopped).
    burksDF.unpersist()
    spark.stop()
  }

}
