package cn.doitedu.dwh

import org.apache.spark.sql.SparkSession


/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-06-09
 * @desc 设备账号绑定评分计算合并任务
 *
 */
object DeviceAccountBindScore {

  def main(args: Array[String]): Unit = {

    // Fail fast with a usage message when either run date is missing.
    if (args.length < 2) {
      System.err.println(
        """
          |  mismatched arguments!
          |  Usage:
          |    args(0):要计算的日志的日期
          |    args(1):要合并的评分表的日期
          |
          |""".stripMargin)
      sys.exit(1)
    }

    val curLogDay = args(0)   // T:   date of the event-log partition to aggregate
    val preBindDay = args(1)  // T-1: date of the bind-score snapshot to merge with

    val spark = SparkSession
      .builder()
      .appName("设备账号绑定评分计算合并任务")
      //.master("local")
      //.config("spark.sql.shuffle.partitions","2")
      .enableHiveSupport()
      .getOrCreate()

    // 1. Aggregate today's log into one row per (device, account):
    //    score = 100 * number of distinct sessions, last_timestamp = latest event time.
    //    Blank accounts (anonymous traffic) are normalized to NULL, and the GROUP BY
    //    uses the same normalized expression so that '' and NULL collapse into a single
    //    row per device (the original grouped on the raw column, which could emit two
    //    separate NULL-account rows for the same device).
    spark.read
      .table("ods.app_event_log")
      .where(s"dt='${curLogDay}'")
      .createOrReplaceTempView("log")

    val curDayScore = spark.sql(
      """
        |select
        |  deviceid,
        |  if(trim(account)='',null,account) as account,
        |  count(distinct sessionid)*100     as score,
        |  max(timestamp)                    as last_timestamp
        |from log
        |group by deviceid, if(trim(account)='',null,account)
        |""".stripMargin)

    // 2. Merge with the T-1 score table via a FULL OUTER JOIN:
    //    - pair present on both sides   -> scores accumulate (cur + pre)
    //    - pair only in today's log     -> today's score as-is
    //    - pair absent from today's log -> old score decays by a factor of 0.4
    //    The account comparison uses the null-safe operator <=> so that NULL-account
    //    (anonymous) rows for the same device actually join; with plain `=`,
    //    NULL = NULL is never true and each day would emit duplicate rows instead
    //    of accumulating/decaying the existing one.
    spark.read
      .table("dws.device_account_bind_score")
      .where(s"dt='${preBindDay}'")
      .createOrReplaceTempView("pre")
    curDayScore.createOrReplaceTempView("cur")

    // 3. Write the merged result into the day-T partition.
    //    INSERT OVERWRITE (instead of INSERT INTO) makes a rerun of the same day
    //    idempotent rather than appending duplicate rows to the partition.
    spark.sql(
      s"""
        |insert overwrite table dws.device_account_bind_score partition(dt='${curLogDay}')
        |
        |select
        |  nvl(cur.deviceid, pre.deviceid) as deviceid,
        |  nvl(cur.account,  pre.account)  as account,
        |  case
        |    when cur.deviceid is not null and pre.deviceid is not null then cur.score + pre.score
        |    when cur.deviceid is not null and pre.deviceid is null     then cur.score
        |    else pre.score * 0.4
        |  end as score,
        |  if(cur.deviceid is not null, cur.last_timestamp, pre.last_timestamp) as last_timestamp
        |
        |from cur full join pre
        |  on cur.deviceid = pre.deviceid and cur.account <=> pre.account
        |""".stripMargin)

    spark.close()
  }

}
