package homework

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{Column, DataFrame, SparkSession}


object Demo5Work {

  /**
   * Demo: per-company growth rate versus the same month of the previous year.
   *
   * Reads a wide CSV (one row per company/year with 12 monthly revenue
   * columns), unpivots it into (burk, year, month, plc) rows via an
   * exploded map, then uses a `lag` window partitioned by company+month
   * and ordered by year to fetch last year's value for the same month.
   * Growth rate = current month revenue / same month last year - 1.
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("burk")
      .master("local")
      // Tiny local demo: a single shuffle partition avoids empty-task overhead.
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    try {
      // Read the raw data: company code, year, and one revenue column per month.
      val burksDF: DataFrame = spark
        .read
        .format("csv")
        .option("sep", ",")
        .schema("burk STRING,year STRING,tsl01 DOUBLE,tsl02 DOUBLE,tsl03 DOUBLE,tsl04 DOUBLE,tsl05 DOUBLE,tsl06 DOUBLE,tsl07 DOUBLE,tsl08 DOUBLE,tsl09 DOUBLE,tsl10 DOUBLE,tsl11 DOUBLE,tsl12 DOUBLE")
        .load("data/burks.txt")

      // Build map(1 -> tsl01, 2 -> tsl02, ..., 12 -> tsl12) programmatically
      // instead of twelve hand-written pairs. lit(i) is the idiomatic way to
      // embed a constant key; the original expr("i") parsed a SQL snippet to
      // produce the same integer literal.
      val monthMap: Column = map(
        (1 to 12).flatMap(i => Seq(lit(i), col(f"tsl$i%02d"))): _*
      )

      burksDF
        // Unpivot: explode the map so each wide row becomes 12 (month, plc) rows.
        .select($"burk", $"year", explode(monthMap) as Array("month", "plc"))
        // lag over (company, month) ordered by year = same month, previous year.
        // Null for a company's first year, so the growth rate is null there too.
        // NOTE(review): `year` is declared STRING, so orderBy is lexicographic —
        // fine for uniform 4-digit years, fragile otherwise; confirm input format.
        .withColumn("ahead", lag($"plc", 1) over Window.partitionBy($"burk", $"month").orderBy($"year"))
        // Growth rate: current / prior-year - 1.
        .withColumn("zengZhang", ($"plc" / $"ahead") - 1)
        .show(100)
    } finally {
      // Always release local Spark resources, even if the job throws.
      spark.stop()
    }
  }
}
/**
 * 2、统计每个公司当月比上年同期增长率  行转列 --> lag窗口函数
公司代码,年度,月度,增长率（当月收入/上年当月收入 - 1）
 *
 */