import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{DataTypes, StructType}

/**
 * ID-mapping (idmp) demo: assigns each device (`did`) a stable global user id (guid).
 *
 * Pipeline:
 *   1. Read today's device/account log (did, uid, ts) from CSV.
 *   2. Score each (did, uid) pair: the first-seen uid on a device gets 1000,
 *      the next 900, then 800, ... (rows with a null uid get a null score).
 *   3. Union with yesterday's historical scores and re-aggregate
 *      (earliest ts, summed score) per (did, uid).
 *   4. Per device, pick the best-scoring uid (ties broken by earliest ts)
 *      as the guid; fall back to the did itself when uid is null.
 *
 * Usage: optional args override the input paths —
 *   args(0) = today's log dir   (default: dw_etl/data/idmpdemo/log/day02)
 *   args(1) = history dir       (default: dw_etl/data/idmpdemo/his/day01)
 */
object IdmpDemo {
  def main(args: Array[String]): Unit = {
    // Silence Spark's noisy INFO logging for the demo.
    Logger.getLogger("org").setLevel(Level.WARN)

    val spark = SparkSession.builder()
      .appName("IdmpDemo") // was "": an empty app name shows blank in the Spark UI
      .master("local")
      .getOrCreate()

    // Allow the input locations to be overridden from the command line;
    // defaults preserve the original hard-coded demo paths.
    val logPath = if (args.length > 0) args(0) else "dw_etl/data/idmpdemo/log/day02"
    val hisPath = if (args.length > 1) args(1) else "dw_etl/data/idmpdemo/his/day01"

    // Explicit schema: everything read as strings; "\N" in the file means null.
    val todSchema = new StructType()
      .add("did", DataTypes.StringType)
      .add("uid", DataTypes.StringType)
      .add("ts", DataTypes.StringType)

    // NOTE: option key canonicalized to "nullValue" (Spark matches option
    // keys case-insensitively, so behavior is unchanged).
    val tod = spark.read
      .option("header", true)
      .option("nullValue", "\\N")
      .schema(todSchema)
      .csv(logPath)
    tod.show(100, truncate = false)

    // createOrReplaceTempView is idempotent — safe if the session is reused.
    tod.createOrReplaceTempView("tod")

    // Debug view: first occurrence rank of each account per device.
    spark.sql(
      """
        |
        |select
        |did,uid,ts,row_number() over(partition by did,uid order by ts) as rn1
        |from tod
        |
        |""".stripMargin).show(100, truncate = false)

    // Score today's data: keep only the first row per (did, uid), then rank
    // the uids within each device by first-seen time; score = 1000, 900, 800, ...
    // A null uid gets a null score (it cannot identify a user).
    val todScore = spark.sql(
      """
        |
        |select
        |did,uid,ts,1000-((row_number() over(partition by did order by ts))-1) * 100 as score
        |
        |from
        |(
        |select
        |did,uid,ts,row_number() over(partition by did,uid order by ts) as rn1
        |from tod
        |) o
        |where rn1=1
        |
        |
        |""".stripMargin).selectExpr("did", "uid", "ts", "if(uid is not null,score,null) as score")

    // Yesterday's accumulated scores (same did/uid/ts/score layout; union is
    // positional, so column order must match todScore).
    val his = spark.read
      .option("header", true)
      .option("nullValue", "\\N")
      .csv(hisPath)

    val unioned = todScore.union(his)
    unioned.show(100, truncate = false)
    unioned.createOrReplaceTempView("u")

    // Merge today with history: earliest timestamp and summed score per pair.
    val aggregated = spark.sql(
      """
        |
        |select
        |did,uid,min(ts) as ts,sum(score) as score
        |from u
        |group by did,uid
        |
        |
        |""".stripMargin)
    aggregated.show(100, truncate = false)

    aggregated.createOrReplaceTempView("sumt")

    // Per device: collect all (uid, ts, score) triples and choose the guid as
    // the highest-scoring uid (earliest ts wins ties); devices with only null
    // uids fall back to using the did itself as guid.
    val combine = spark.sql(
      """
        |select
        |
        |did,collect_list(struct(uid,ts,score)) as lst,min(guid) as guid
        |
        |from (
        |select
        |did,uid,ts,score,first_value(if(uid is not null,uid,did)) over(partition by did order by score desc,ts asc) as guid
        |from
        |sumt
        |) o
        |group by did
        |
        |
        |""".stripMargin).selectExpr("did", "lst", "guid", "md5(guid)")
    combine.show(100, truncate = false)

    // stop() is the documented SparkSession shutdown call.
    spark.stop()
  }

}
