package com.wzz.sparkProject

import org.apache.spark.sql.SparkSession

object AppLogWash {

  /**
   * App event-log washing job.
   *
   * Entry point. Expects exactly one argument: the partition date string
   * (used as `dt=...`, presumably `yyyy-MM-dd` — confirm with the caller).
   * Reads `ods.app_event_log` for that day, filters out records with
   * missing/blank key fields or timestamps outside the day's boundary,
   * and overwrites the corresponding partition of `tmp.event_log_washed`.
   */
  def main(args: Array[String]): Unit = {
    if(args.length == 0){
      println("请提供时间参数")
      // A missing argument is a usage ERROR: exit with a non-zero status so
      // schedulers (Azkaban/Oozie/Airflow) register the run as failed instead
      // of a silent success (the original exit(0) masked the failure).
      System.exit(1)
    }
    val time = args(0)
    println(s"参数1 ${time}")

    // Session is built by a project-local helper; app name shown in the Spark UI.
    val spark: SparkSession = SparkUtils.getSparkSession("数据清洗-AppLogWash")

    // Washing rules:
    //  - deviceid / eventid / sessionid must be non-null and non-blank
    //  - properties must be a non-empty map
    //  - `timestamp` (epoch millis) must fall inside [time, time+1 day),
    //    with day boundaries interpreted in GMT+8 via to_utc_timestamp.
    val sql =
      s"""
         |
         |insert overwrite table tmp.event_log_washed
         |partition(dt='${time}')
         |select
         |  account        ,
         |  appid          ,
         |  appversion     ,
         |  carrier        ,
         |  deviceid       ,
         |  devicetype     ,
         |  eventid        ,
         |  ip             ,
         |  latitude       ,
         |  longitude      ,
         |  nettype        ,
         |  osname         ,
         |  osversion      ,
         |  properties     ,
         |  releasechannel ,
         |  resolution     ,
         |  sessionid      ,
         |  `timestamp`
         |from ods.app_event_log
         |where dt = '${time}'
         |      and  deviceid is not null and trim(deviceid) != ''
         |      and  eventid is not null and trim(eventid) != ''
         |      and  properties is not null and size(properties) != 0
         |          and  sessionid is not null and trim(sessionid) != ''
         |          and  unix_timestamp(to_utc_timestamp('${time}','GMT+8'),'yyyy-MM-dd')*1000 <= `timestamp`
         |          and  unix_timestamp(to_utc_timestamp(date_add('${time}',1),'GMT+8'),'yyyy-MM-dd')*1000 >  `timestamp`
         |
         |""".stripMargin

    // Guarantee the session is released even when the SQL fails, so the job
    // does not leak cluster resources on error (original skipped stop() on throw).
    try {
      spark.sql(sql)
    } finally {
      spark.stop()
    }
  }
}
