package com.doit.dophin.etl

import org.apache.spark.sql.SparkSession

/**
 * @DATE 2022/3/20/21:38
 * @Author MDK
 * @Version 2021.2.2
 *
 * 将app中的日志数据进行清洗,插入临时表中
 *
 * 先创建临时表,放入清洗后的数据
 * create table tmp.mall_applog_washed(
   account            string
   ,app_id            string
   ,app_version       string
   ,carrier           string
   ,device_id         string
   ,device_type       string
   ,event_id          string
   ,ip                string
   ,latitude          double
   ,longitude         double
   ,net_type          string
   ,os_name           string
   ,os_version        string
   ,properties        map<string,string>
   ,release_channel   string
   ,resolution        string
   ,session_id        string
   ,ts                bigint
)
partitioned by (dt string)
stored as orc
tblproperties('orc.compress'='snappy');
 * */
object _1_ApplogWash {

  /**
   * Entry point: cleans one day of raw app logs from `ods.mall_app_log` and
   * appends the surviving rows into the pre-created temp table
   * `tmp.mall_applog_washed` (partition dt = args(0)).
   *
   * Cleaning rules applied in the SQL below:
   *   1. key fields (eventid, sessionid, deviceid, properties) must be non-null / non-blank
   *   2. the record timestamp must fall within [args(0) 00:00, args(1) 00:00)
   *   3. the target table must exist beforehand (DDL is in the file header comment)
   *
   * @param args args(0) = date to process in yyyy-MM-dd form (e.g. 2022-02-16),
   *             args(1) = the day after args(0) (exclusive upper bound)
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message when the two date arguments are missing.
    if (args.length != 2) {
      println(
        """
          |usage:必须传入两个参数
          |  参数一:待处理的日期,如2022-02-16
          |  参数二:待处理日期后一天
          |""".stripMargin)
      sys.exit(1)
    }

    // NOTE(review): master("local") is hard-coded and overrides whatever
    // --master is passed to spark-submit — remove it (or make it configurable)
    // before deploying to a cluster.
    val spark = SparkSession.builder()
      .appName("app端日志数据清洗,过滤")
      .master("local")
      .enableHiveSupport()
      .getOrCreate()

    val dt: String = args(0)     // partition/date being processed (yyyy-MM-dd)
    val dtNext: String = args(1) // exclusive upper bound of the time window

    /*
     * Key points:
     *   1. key fields must not be missing
     *   2. the JSON must have parsed correctly (properties non-null)
     *   3. the record time must lie inside the requested day
     *   4. create the temp table mall_applog_washed before running this job
     *
     * NOTE(review): `insert into` appends — re-running the job for the same dt
     * will duplicate rows; consider `insert overwrite` for idempotent re-runs.
     */
    spark.sql(
      s"""
        |insert into table tmp.mall_applog_washed partition(dt='${dt}')
        |select
        |   if(trim(account)='',null,account) as account
        |   ,appid                 as  app_id
        |   ,appversion            as  app_version
        |   ,carrier               as  carrier
        |   ,deviceid              as  device_id
        |   ,devicetype            as  device_type
        |   ,eventid               as  event_id
        |   ,ip                    as  ip
        |   ,latitude              as  latitude
        |   ,longitude             as  longitude
        |   ,nettype               as  net_type
        |   ,osname                as  os_name
        |   ,osversion             as  os_version
        |   ,properties            as  properties
        |   ,releasechannel        as  release_channel
        |   ,resolution            as  resolution
        |   ,sessionid             as  session_id
        |   ,`timestamp`           as  ts
        |from ods.mall_app_log
        |where dt='${dt}'
        |-- 关键字段不能缺失
        |and eventid is not null and trim(eventid) != ''
        |and sessionid is not null and trim(sessionid) != ''
        |and deviceid is not null and trim(deviceid) != ''
        |and properties is not null
        |-- 数据时间必须在指定的时间内
        |and `timestamp` >= unix_timestamp('${dt}','yyyy-MM-dd')*1000 and `timestamp` < unix_timestamp('${dtNext}','yyyy-MM-dd')*1000
        |""".stripMargin)

    spark.close()

    // Afterwards, verify on Hive that the ods and tmp tables contain data:
    // select * (or count(1) for the row count) from ods.mall_app_log where dt='2022-04-15' limit 10;
  }
}
