package com.shujia.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{Column, DataFrame, SparkSession}

object Test2 {

  /**
   * Reads per-company yearly sales (one row per burk/year with twelve monthly
   * columns tsl01..tsl12), unpivots the months into rows, and shows each
   * month's sales together with the cumulative (running) total within the
   * same burk/year, ordered by month.
   */
  def main(args: Array[String]): Unit = {
    val ss: SparkSession = SparkSession.builder()
      .master("local")
      .appName("spark sql练习2")
      .config("spark.sql.shuffle.partitions", "1")
      .getOrCreate()

    import ss.implicits._
    import org.apache.spark.sql.functions._

    // Input: burk (company code), year, and 12 monthly sales columns.
    val burksDF: DataFrame = ss.read
      .format("csv")
      .option("sep", ",")
      .schema("burk STRING,year STRING," +
        "tsl01 LONG,tsl02 LONG,tsl03 LONG,tsl04 LONG," +
        "tsl05 LONG,tsl06 LONG,tsl07 LONG,tsl08 LONG," +
        "tsl09 LONG,tsl10 LONG,tsl11 LONG,tsl12 LONG")
      .load("spark/data/burks.txt")

    // Pure-SQL version of the same computation (kept for reference):
//    burksDF.createOrReplaceTempView("burks")
//    ss.sql(
//      """
//        |
//        |select
//        |t1.burk as burk,
//        |t1.year as year,
//        |t1.month as month,
//        |t1.sell as sell,
//        |sum(sell) over(partition by burk,year order by month) as leiji_sell
//        |from
//        |(
//        |select
//        |burk,
//        |year,
//        |month,
//        |sell
//        |from
//        |burks
//        |lateral view explode(map(1,tsl01,2,tsl02,3,tsl03,4,tsl04,5,tsl05,6,tsl06,7,tsl07,8,tsl08,9,tsl09,10,tsl10,11,tsl11,12,tsl12)) T as month,sell
//        |) t1
//        |
//        |""".stripMargin).show()

    // Map of month number -> that month's sales column; exploding it unpivots
    // the 12 wide columns into (month, sell) rows.
    // lit("n") is the idiomatic literal column; expr("'n'") was a workaround.
    val monthToSell: Column = map(
      lit("1"), $"tsl01",
      lit("2"), $"tsl02",
      lit("3"), $"tsl03",
      lit("4"), $"tsl04",
      lit("5"), $"tsl05",
      lit("6"), $"tsl06",
      lit("7"), $"tsl07",
      lit("8"), $"tsl08",
      lit("9"), $"tsl09",
      lit("10"), $"tsl10",
      lit("11"), $"tsl11",
      lit("12"), $"tsl12"
    )

    burksDF
      .select($"burk", $"year", explode(monthToSell) as Array("month", "sell"))
      // Running total of sales within each burk/year in month order.
      // Named leiji_sell (cumulative) to match the SQL version above;
      // the old name "next_sell" was misleading — this is not next month's value.
      // Cast month to INT so "10" does not sort before "2".
      .withColumn("leiji_sell",
        sum($"sell") over Window.partitionBy($"burk", $"year").orderBy($"month".cast("INT")))
      .show()

    // Release the local Spark resources.
    ss.stop()
  }
}
