package com.shujia.spark.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{Column, DataFrame, SparkSession}

object Demo5Burk {

  /**
   * Monthly-revenue analytics over a "wide" table (one column per month,
   * tsl01..tsl12). Each computation is shown twice — once in Spark SQL and
   * once in the DataFrame DSL — to demonstrate their equivalence:
   *
   *  1. running (year-to-date) monthly revenue per company/year
   *     (unpivot via explode + sum window function)
   *  2. year-over-year growth rate per company/month
   *     (unpivot via explode + lag window function)
   *
   * Input: data/burks.txt, CSV with columns
   *   burk (company code), year, tsl01..tsl12 (monthly revenue).
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("dsl")
      .master("local")
      // Small shuffle-partition count keeps this local demo fast.
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Read the source data: one row per company/year, twelve monthly columns.
    val burksDF: DataFrame = spark
      .read
      .format("csv")
      .schema("burk STRING,year STRING,tsl01 DOUBLE,tsl02 DOUBLE,tsl03 DOUBLE,tsl04 DOUBLE,tsl05 DOUBLE,tsl06 DOUBLE,tsl07 DOUBLE,tsl08 DOUBLE,tsl09 DOUBLE,tsl10 DOUBLE,tsl11 DOUBLE,tsl12 DOUBLE")
      .option("sep", ",")
      .load("data/burks.txt")

    burksDF.createOrReplaceTempView("burks")

    /**
     * 1. Running (year-to-date) revenue per company per year.
     *    Unpivot the monthly columns into rows, then apply a sum window
     *    function ordered by month.
     *    Output columns: company code, year, month, monthly revenue,
     *    cumulative revenue.
     */

    spark.sql(
      """
        |select
        | burk,year,month,tsl,
        | sum(tsl) over(partition by burk,year order by month) as sum_tsl
        | from (
        |   select burk,year,month,tsl from
        |   burks
        |   lateral view explode(map(1,tsl01,2,tsl02,3,tsl03,4,tsl04,5,tsl05,6,tsl06,7,tsl07,8,tsl08,9,tsl09,10,tsl10,11,tsl11,12,tsl12)) t as month,tsl
        |) as a
        |""".stripMargin) //.show()

    // Literal map column {1 -> tsl01, ..., 12 -> tsl12} used with explode()
    // to unpivot the twelve monthly columns into (month, tsl) rows.
    // Generating the pairs avoids twelve hand-written (and typo-prone) entries;
    // lit() is the idiomatic way to build a literal column (the original used
    // expr("1"), which needlessly round-trips through the SQL parser).
    val tsl_map: Column = map(
      (1 to 12).flatMap(m => Seq(lit(m), col(f"tsl$m%02d"))): _*
    )

    burksDF
      // Unpivot: one wide row becomes twelve (month, tsl) rows.
      .select($"burk", $"year", explode(tsl_map) as Array("month", "tsl"))
      // Running total within each company/year, ordered by month.
      .withColumn("sum_tsl", sum($"tsl") over Window.partitionBy($"burk", $"year").orderBy($"month"))
      .show()

    /**
     * 2. Year-over-year growth rate per company per month.
     *    Unpivot, then use lag() to fetch the same month of the previous year.
     *    Output columns: company code, year, month,
     *    growth rate = (current month / same month last year - 1) * 100.
     *    The first year has no prior value (lag default 0 -> division yields
     *    null), so nvl/coalesce reports 100.
     */

    spark.sql(
      """
        |select * ,
        |nvl(round((tsl / last_tsl - 1 ) * 100,2),100) as p
        |from(
        |   select
        |    burk,year,month,tsl,
        |    lag(tsl,1,0) over(partition by burk,month order by year) last_tsl
        |    from (
        |      select burk,year,month,tsl from
        |      burks
        |      lateral view explode(map(1,tsl01,2,tsl02,3,tsl03,4,tsl04,5,tsl05,6,tsl06,7,tsl07,8,tsl08,9,tsl09,10,tsl10,11,tsl11,12,tsl12)) t as month,tsl
        |   ) as a
        |) as b
        |""".stripMargin) //.show()

    burksDF
      // Unpivot: one wide row becomes twelve (month, tsl) rows.
      .select($"burk", $"year", explode(tsl_map) as Array("month", "tsl"))
      // Same month of the previous year (0 when there is no prior year).
      .withColumn("last_tsl", lag($"tsl", 1, 0) over Window.partitionBy($"burk", $"month").orderBy($"year"))
      // Growth rate in percent; division by 0 yields null, coalesced to 100.
      .withColumn("p", coalesce(round(($"tsl" / $"last_tsl" - 1) * 100, 2), expr("100")))
      .show()

    // Release the SparkSession's resources (the original leaked it).
    spark.stop()
  }
}
