package com.doit.dophin.etl

import org.apache.spark.sql.SparkSession

/**
 * @DATE 2022/3/24/15:20
 * @Author MDK
 * @Version 2021.2.2
 *
 *  DDL for the target table (建表):
 create table dwd.mall_applog_detail(
   guid               bigint
   ,account           string
   ,app_id            string
   ,app_version       string
   ,carrier           string
   ,device_id         string
   ,device_type       string
   ,event_id          string
   ,ip                string
   ,latitude          double
   ,longitude         double
   ,net_type          string
   ,os_name           string
   ,os_version        string
   ,properties        map<string,string>
   ,release_channel   string
   ,resolution        string
   ,session_id        string
   ,ts                bigint
   ,new_session_id    string
   ,province          string
   ,city              string
   ,region            string
)
partitioned by (dt string)
stored as orc
tblproperties('orc.compress'='snappy');
 *
 * */
/**
 * Assigns a global user id (guid) to every app-log event for one partition day.
 *
 * Pipeline:
 *   1. Read the cleaned/sessionized/geo-enriched temp table for the given dt.
 *   2. Split rows into "has account" vs "no account".
 *   3. For account-less rows, look up the best-scoring account bound to the
 *      device in the device-account bind table.
 *   4. Rows that (now) have an account join the user registration table for
 *      user_id; rows still without an account join the anonymous-device
 *      mapping table for a temporary user_id.
 *   5. Union both halves and insert into dwd.mall_applog_detail partition dt.
 *
 * Usage: one argument — the partition date, e.g. 2022-03-24.
 */
object _4_ApplogGuid {
  def main(args: Array[String]): Unit = {
    if (args.length != 1) {
      println(
        """
          |usage:需要传入一个参数
          |  参数一:请输入指定日期参数,如2022-03-24
          |
          |""".stripMargin)
      sys.exit(1)
    }

    val dt: String = args(0)

    // NOTE(review): .master("local") is hard-coded; for cluster deployment it
    // should normally be supplied via spark-submit instead. Kept to preserve
    // existing behavior.
    val spark: SparkSession = SparkSession.builder()
      .appName("生成用户guid")
      .master("local")
      .enableHiveSupport()
      .getOrCreate()

    // Read the temp table holding logs after cleansing, filtering,
    // normalization, session splitting and geo enrichment.
    // FIX: the original dropped "dt" BEFORE filtering on it, and compared the
    // partition unquoted (dt=2022-03-24 parses as integer arithmetic). Filter
    // on the quoted partition value first, then drop the column.
    val logTable = spark.read.table("tmp.mall_applog_area")
      .where(s"dt='${dt}'")
      .drop("dt")
    logTable.cache()

    // Split the logs into two disjoint halves: with account / without account.
    // FIX: the original predicates overlapped — a non-null blank account
    // matched both "account is not null" and "trim(account)=''", so those rows
    // were processed (and written) twice. The two conditions below are exact
    // complements.
    val hasAccount = logTable.where("account is not null and trim(account) != ''")
    val noAccount = logTable.where("trim(account)='' or account is null")

    // From the device-account bind table, keep the highest-weight account per
    // device; ties broken by most recent login (rn = 1 after the window).
    val deviceBind = spark.sql(
      s"""
        |select
        |  device_id,
        |  account,
        |  row_number() over(partition by device_id order by weight desc, last_login desc) as rn
        |from dws.mall_app_device_account_bind
        |where dt= '${dt}'
        |
        |""".stripMargin).where("rn=1")

    // For account-less rows, borrow the account bound to the same device.
    // FIX: nvl(t2.account, null) was a no-op — nvl with a null fallback is
    // just the column itself.
    noAccount.createTempView("noAccount")
    deviceBind.createTempView("device_bind")
    val tmp2 = spark.sql(
      """
        |select
        |  t2.account as account
        |  ,t1.app_id
        |  ,t1.app_version
        |  ,t1.carrier
        |  ,t1.device_id
        |  ,t1.device_type
        |  ,t1.event_id
        |  ,t1.ip
        |  ,t1.latitude
        |  ,t1.longitude
        |  ,t1.net_type
        |  ,t1.os_name
        |  ,t1.os_version
        |  ,t1.properties
        |  ,t1.release_channel
        |  ,t1.resolution
        |  ,t1.session_id
        |  ,t1.ts
        |  ,t1.new_session_id
        |  ,t1.province
        |  ,t1.city
        |  ,t1.region
        |from noAccount t1 left join device_bind t2 on t1.device_id=t2.device_id
        |
        |""".stripMargin)

    tmp2.cache()

    // Split tmp2 again: rows for which a bound account was found vs not.
    val findAccount = tmp2.where("account is not null")
    val notFindAccount = tmp2.where("account is null")

    // Rows that originally had an account, unioned with rows whose account was
    // recovered from the device bind, join the registration table for user_id.
    // FIX: unionAll is deprecated since Spark 2.0 — union is the same
    // (duplicate-preserving, positional) operation.
    // NOTE(review): union resolves columns BY POSITION; this assumes
    // tmp.mall_applog_area's column order matches tmp2's select list — confirm
    // against the temp table's schema.
    hasAccount.union(findAccount).createTempView("hasAccount")
    val part1 = spark.sql(
      """
        |select
        |  t2.user_id,
        |  t1.*
        |from hasAccount t1
        |join dwd.user_reg_info t2
        |on t1.account=t2.account
        |
        |""".stripMargin)

    // Rows still without an account: map the anonymous device to a temporary
    // user_id via the empty-device-id mapping table.
    notFindAccount.createTempView("notFindAccount")
    val part2 = spark.sql(
      """
        |select
        |  t2.user_id,
        |  t1.*
        |from notFindAccount t1
        |join dws.mall_app_device_tmpid t2
        |on t1.device_id = t2.device_id
        |""".stripMargin)

    // Union the two user_id-resolved halves and write the day's partition.
    // NOTE(review): "insert into" appends — re-running the job for the same dt
    // duplicates data; "insert overwrite" would make the job idempotent.
    // Confirm intent before changing.
    part1.createTempView("part1")
    part2.createTempView("part2")
    spark.sql(
      s"""
        |insert into table dwd.mall_applog_detail partition(dt='${dt}')
        |select * from part1
        |union all
        |select * from part2
        |""".stripMargin)

    spark.close()
  }
}
