package com.shujia.spark.sql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{Column, DataFrame, SaveMode, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo: unpivot 12 monthly columns (tsl01..tsl12) into (month, pic) rows,
 * then compute per-(id, year) running totals and year-over-year growth,
 * showing both the DataFrame API and equivalent Spark SQL formulations.
 *
 * Input : data/burk.txt  (CSV: id, year, tsl01..tsl12)
 * Output: data/burk      (CSV: id, year, month, pic, lastPic, pre)
 */
object Demo6Test1 {

  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("test1")
      .master("local")
      // small local demo — keep shuffle parallelism at 1 for readable output
      .config("spark.sql.shuffle.partitions", "1")
      .getOrCreate()

    import spark.implicits._

    import org.apache.spark.sql.functions._

    val burkDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",")
      .schema("id String,year String,tsl01 INT,tsl02 INT,tsl03 INT,tsl04 INT,tsl05 INT,tsl06 INT,tsl07 INT,tsl08 INT,tsl09 INT,tsl10 INT,tsl11 INT,tsl12 INT")
      //.schema("id String,year String,income String")
      .load("data/burk.txt")

    // month-number -> monthly-value map column; exploding it unpivots the 12
    // tsl columns into rows. Built from a range instead of 12 hand-written
    // pairs; lit(i) is the idiomatic literal (no SQL parsing as with expr("i")).
    val m: Column = map(
      (1 to 12).flatMap(i => Seq(lit(i), col(f"tsl$i%02d"))): _*
    )

    // DataFrame API: running total of pic per (id, year), ordered by month.
    burkDF
      .select($"id", $"year", explode(m).as(Array("month", "pic")))
      .withColumn("sumPic", sum($"pic").over(Window.partitionBy($"id", $"year").orderBy($"month")))
      //.show()

    burkDF.createOrReplaceTempView("burk")

    // Same running total, SQL form 1: explode(map(...)) in the SELECT list.
    spark.sql(
      """
        |select id,year,month,pic,sum(pic) over(partition by id,year order by month) as sumPic from(
        |select id,year,explode(map(1,tsl01,2,tsl02,3,tsl03,4,tsl04,5,tsl05,6,tsl06,7,tsl07,8,tsl08,9,tsl09,10,tsl10,11,tsl11,12,tsl12)) as (month,pic)
        |from burk )
        |
      """.stripMargin)//.show()

    // Same running total, SQL form 2: LATERAL VIEW. The generator needs a
    // table alias (here `t`) before AS per Spark's LATERAL VIEW grammar.
    spark.sql(
      """
        |
        |select id,year,month,pic,sum(pic) over(partition by id,year order by month) sumPic
        |from (
        |select id,year,month,pic from burk
        |lateral view explode(map(1,tsl01,2,tsl02,3,tsl03,4,tsl04,5,tsl05,6,tsl06,7,tsl07,8,tsl08,9,tsl09,10,tsl10,11,tsl11,12,tsl12)) t as month,pic )
        |
      """.stripMargin)

    // Year-over-year growth in SQL: lag(pic) over the previous year for the
    // same (id, month); first year has no predecessor, so pre defaults to 1.
    spark.sql(
      """
        |
        |select id,year,month,pic,lastPic,if(isNull(lastPic),1,(pic/lastPic -1)) as pre
        |from(
        |select id,year,month,pic,lag(pic,1) over(partition by id,month order by year) as lastPic
        |from(
        |select id,year,month,pic from burk
        |lateral view explode(map(1,tsl01,2,tsl02,3,tsl03,4,tsl04,5,tsl05,6,tsl06,7,tsl07,8,tsl08,9,tsl09,10,tsl10,11,tsl11,12,tsl12)) t as month,pic
        |))
        |
      """.stripMargin)//.show(1000)

    // Year-over-year growth via the DataFrame API; result written out as CSV.
    burkDF
      .select($"id", $"year", explode(m).as(Array("month", "pic")))
      .withColumn("lastPic", lag($"pic", 1).over(Window.partitionBy($"id", $"month").orderBy($"year")))
      .withColumn("Pre", round($"pic" / $"lastPic" - 1, 8))
      // no previous year -> growth defaults to 1.0 (mirrors the SQL version)
      .select($"id", $"year", $"month", $"pic", $"lastPic", when($"Pre".isNull, 1.0).otherwise($"Pre").as("pre"))
      .write
      .mode(SaveMode.Overwrite)
      .option("sep", ",")
      .format("csv")
      .save("data/burk")

    // Release local Spark resources before exiting.
    spark.stop()

  }


}
