package com.wzk.dwetl

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * @author 康哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-01-12
 * @desc 设备&账号关联得分计算
 *
 *       -- 目标表结构
 *       CREATE TABLE dwd.device_account_relation
 *       (
 *       deviceid   STRING,
 *       account    STRING,
 *       score      DOUBLE,
 *       first_time BIGINT,
 *       last_time  BIGINT
 *       )
 *       PARTITIONED BY (dt STRING)
 *       STORED AS PARQUET
 *       TBLPROPERTIES('parquet.compression'='snappy')
 *       ;
 *
 *
 *       -- 计算策略
 *       基于T-1日的"关联得分表" 和 T日的行为日志，得出T日的"关联得分表"
 *
 *
 *
 *       -- 详细计算策略
 *       先统计T日的会话数，得出 设备-账号 的当日关联得分
 *       然后，将上一步的计算结果，去FULL JOIN T-1日的关联表，进行判断取值即可
 *
 *
 *       -- 结果数据的质量检查代码
 *       select
 *       deviceid
 *       from dwd.device_account_relation
 *       group by deviceid
 *       having count(1)>count(account) and count(1)>1
 *       -- 按条数统计如果大于按账号统计，且条数>1，则说明相同设备上，有带账号，也有不带账号的
 *       +-----------+
 *       | deviceid  |
 *       +-----------+
 *       +-----------+
 *
 *       -- 数值质量检查代码
 *       验证ods中源表的deviceid 基数  与  设备账号关联评分表中的 deviceid 基数
 *       select  count(distinct deviceid) from ods.event_app_log where dt='2021-01-10'
 *       union all
 *       select  count(distinct deviceid) from dwd.device_account_relation where dt='2021-01-10'
 *
 */
object DeviceAccountRelationScoreTest {

  /**
   * Local test driver for the device-account relation score.
   *
   * Reads the T-1 score table and the T-day per-session scores from CSV
   * fixtures, full-joins them, applies the merge/decay rule, collapses
   * anonymous (null-account) rows per device, and prints the result.
   */
  def main(args: Array[String]): Unit = {

    // Keep the console readable when running locally.
    Logger.getLogger("org").setLevel(Level.WARN)

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .getOrCreate()

    // T-1 relation scores; "\N" is the CSV null marker of the fixture export.
    val prevScores: DataFrame = spark.read
      .option("header", "true")
      .option("nullValue", "\\N")
      .csv("dw_etl/testdata/idbind/2021-01-11/idbind.csv")
    prevScores.createTempView("re")

    // T-day session scores derived from the behavior log fixture.
    val todayScores = spark.read
      .option("header", "true")
      .csv("dw_etl/testdata/logdata/2021-01-12/log.csv")
    todayScores.createTempView("log")

    prevScores.show()
    todayScores.show()

    // Full outer join keeps pairs seen only historically, only today, or both.
    val joinSql =
      """
        |
        |select
        | re.deviceid as re_deviceid,
        | re.account as re_account,
        | cast(re.score as double) as re_score,
        | re.first_time as re_first_time,
        | re.last_time as re_last_time,
        | log.deviceid as log_deviceid,
        | log.account as log_account,
        | cast(log.score as double) as log_score,
        | log.time as log_time
        |from
        | re
        |full join
        | log
        |on
        | re.deviceid = log.deviceid and re.account = log.account
        |""".stripMargin
    val fullJoined = spark.sql(joinSql)
    fullJoined.createTempView("joined")

    /**
     * Shape of the joined view on the sample fixtures:
     * +-----------+----------+--------+-------------+------------+------------+-----------+---------+--------+
     * |re_deviceid|re_account|re_score|re_first_time|re_last_time|log_deviceid|log_account|log_score|log_time|
     * +-----------+----------+--------+-------------+------------+------------+-----------+---------+--------+
     * |         d1|        c1|     200|            1|          10|          d1|         c1|      200|      11|
     * |       null|      null|    null|         null|        null|          d1|         c5|      100|      11|
     * |       null|      null|    null|         null|        null|          d6|         c6|      200|      11|
     * |         d3|      null|    null|         null|        null|        null|       null|     null|    null|
     * |       null|      null|    null|         null|        null|          d5|         c5|      200|      11|
     * |         d2|        c2|     800|            2|           8|        null|       null|     null|    null|
     * |         d2|        c3|     600|            3|           7|          d2|         c3|      100|      11|
     * |         d4|        c4|     200|            1|          10|        null|       null|     null|    null|
     * |         d5|      null|    null|         null|        null|        null|       null|     null|    null|
     * |       null|      null|    null|         null|        null|          d7|         ''|     null|    null|
     * +-----------+----------+--------+-------------+------------+------------+-----------+---------+--------+
     */
    // Merge rule: a pair absent today decays to 70% of its old score; a pair
    // seen today adds the day score to the (possibly missing) old score.
    // NOTE(review): if log_score is NULL on a matched/log-only row (see the d7
    // fixture row), nvl(re_score,0) + log_score yields NULL — confirm intended.
    val scoreSql =
      """
        |
        |select
        | nvl(re_deviceid,log_deviceid) as deviceid,
        | nvl(re_account,log_account) as account,
        | if(re_deviceid is not null and log_deviceid is null,re_score * 0.7,nvl(re_score,0) + log_score) as score,
        | nvl(re_first_time,log_time) as first_time,
        | nvl(log_time,re_last_time) as last_time
        |from
        | joined
        |""".stripMargin
    val merged = spark.sql(scoreSql)

    // Separate anonymous rows from attributed ones: a device that has at least
    // one account association must not also emit an account-less row.
    merged.where("account is null").createTempView("t_null")
    merged.where("account is not null").createTempView("t_you")

    val dedupSql =
      """
        |
        |select
        | nvl(t_you.deviceid,t_null.deviceid) as deviceid,
        | t_you.account as account,
        | t_you.score as score,
        | t_you.first_time as first_time,
        | t_you.last_time as last_time
        |from
        | t_null full join t_you on t_null.deviceid = t_you.deviceid
        |""".stripMargin
    val deduped = spark.sql(dedupSql)
    deduped.show()

    spark.close()
  }
}
