package cn.doitedu.dw_etl.logetl

import java.io.File

import ch.hsr.geohash.GeoHash
import cn.doitedu.dw_etl.utils.{DictLoadUtils, EventBeanUtils}
import org.apache.commons.io.FileUtils
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FileSystem, Path}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.lionsoul.ip2region.{DbConfig, DbSearcher}

/**
 * ODS APP-log extraction to DWD task.
 *
 * Reads one partition of ODS.EVENT_APP_LOG, filters invalid rows, enriches each
 * event with geo dimensions (geohash dict first, ip2region fallback), a GUID
 * (account, else idmapping dict), and a new-user flag, then writes the result
 * into the matching dwd.event_app_detail partition.
 *
 * args(0): spark run mode (master)
 * args(1): partition date of the logs to process
 * args(2): the day before the processing date (used for history dictionaries)
 */
object OdsAppLog2Dwd {
  def main(args: Array[String]): Unit = {

    Logger.getLogger("org").setLevel(Level.WARN)

    if (args.size < 3) {
      println(
        """
          |Usage:
          | args(0)： spark任务运行模式
          | args(1):  要处理的日志的分区日期
          | args(2):  要处理的日期的前一日
          |""".stripMargin)
      sys.exit(1)
    }
    import cn.doitedu.commons.utils.CustomUdfs._
    import org.apache.spark.sql.functions._


    val spark = SparkSession.builder()
      .appName("ODS APP日志抽取到DWD任务")
      .master(s"${args(0)}")
      .enableHiveSupport() // enable spark access to hive tables
      .getOrCreate()
    import spark.implicits._

    // Read the ODS partition for the target processing date
    val ods_app: Dataset[Row] = spark.read.table("ODS.EVENT_APP_LOG").where(s"dt='${args(1)}'")

    spark.udf.register("isblank", isblank)

    /**
     * Filtering: drop rows that have neither deviceid nor account, rows missing
     * key fields, and rows whose event time falls outside the partition date.
     */
    val filterd = ods_app
      .where("!(isblank(deviceid) and isblank(account))")
      .where("properties is not null and !isblank(eventid) and !isblank(sessionid)")
      .where(s"to_date(from_unixtime(cast(timestamp/1000 as bigint))) = '${args(1)}'")
    // TODO: bot/crawler filtering

    /**
     * TODO: session splitting
     */

    /**
     * Geo-dimension integration: load dictionaries on the driver and broadcast
     * them so each executor gets a single read-only copy.
     */
    // geohash(5) -> (province, city, region) dictionary
    val areaDictMap = DictLoadUtils.loadGeoHashDict(spark)
    val bc1 = spark.sparkContext.broadcast(areaDictMap)

    // raw bytes of the ip2region database file (searched fully in memory)
    val ip2Regionbytes: Array[Byte] = DictLoadUtils.loadIp2RegionDbFile()
    val bc2 = spark.sparkContext.broadcast(ip2Regionbytes)

    // latest idmapping dictionary generated for the current day
    val idmpMap = DictLoadUtils.loadIdmappingDict(spark, s"/dict_data/idmp_dict/${args(1)}")
    val bc3 = spark.sparkContext.broadcast(idmpMap)

    // all deviceids/accounts seen on any previous day (for new-user flagging)
    val (deviceIds, accounts) = DictLoadUtils.loadDeviceIdsAndAccounts(spark, s"${args(2)}")
    val bc4 = spark.sparkContext.broadcast(deviceIds)
    val bc5 = spark.sparkContext.broadcast(accounts)

    val finalResult = filterd.rdd.mapPartitions(iter => {
      val areaDict = bc1.value           // geohash -> (province, city, region)
      val ip2Regionbytearray = bc2.value // ip2region db bytes
      val idmpDict = bc3.value           // deviceid -> guid mapping
      val deviceIdSet = bc4.value        // all historically seen device ids
      val accountsSet = bc5.value        // all historically seen accounts

      // PERF FIX: build the ip searcher ONCE per partition. The original code
      // constructed a new DbConfig + DbSearcher for every single row inside
      // iter.map, which defeats the purpose of mapPartitions.
      val dbSearcher = new DbSearcher(new DbConfig(), ip2Regionbytearray)

      iter.map(row => {
        val appEventBean = EventBeanUtils.row2AppEventBean(row)

        var province = ""
        var city = ""
        var region = ""
        var isnew = 1 // 1 marks a new user

        // Step 1: resolve province/city/region from the gps coordinates.
        // Encode the coordinates as a 5-char geohash and look it up in the dict.
        if (appEventBean.latitude != null && appEventBean.longitude != null &&
          appEventBean.latitude > -90 && appEventBean.latitude < 90 &&
          appEventBean.longitude > -180 && appEventBean.longitude < 180) {
          val geo = GeoHash.geoHashStringWithCharacterPrecision(appEventBean.latitude, appEventBean.longitude, 5)
          val areaInfo = areaDict.getOrElse(geo, ("", "", ""))
          province = areaInfo._1
          city = areaInfo._2
          region = areaInfo._3
        }
        // BUG FIX: the original code unconditionally reset province/city/region
        // to "" right here (leftover debug code, together with a stray
        // println), which threw away every successful geohash match and forced
        // the ip fallback for ALL rows. The reset and the println are removed.

        // Step 2: fall back to ip2region when gps matching produced nothing.
        if (isblank(province + city + region)) {
          val block = dbSearcher.memorySearch(appEventBean.ip)
          // ip2region region format: country|area|province|city|isp (5 fields)
          if (block != null && block.getRegion.split("\\|").size > 4) {
            val areaInfo = block.getRegion.split("\\|")
            province = areaInfo(2)
            city = areaInfo(3)
          }
        }

        // GUID: prefer the logged-in account; otherwise map deviceid via the
        // idmapping dictionary.
        // BUG FIX: the original used idmpDict.get(deviceid).get, which throws
        // NoSuchElementException (failing the whole task) for any device absent
        // from the dictionary. Fall back to the raw deviceid instead
        // (assumes guid and deviceid share a compatible type — TODO confirm
        // against AppEventBean / loadIdmappingDict).
        val guid =
          if (isblank(appEventBean.account)) idmpDict.getOrElse(appEventBean.deviceid, appEventBean.deviceid)
          else appEventBean.account

        // New/returning flag: 0 when either id was seen on a previous day.
        if (deviceIdSet.contains(appEventBean.deviceid) || accountsSet.contains(appEventBean.account)) isnew = 0

        appEventBean.province = province
        appEventBean.city = city
        appEventBean.region = region
        appEventBean.guid = guid
        appEventBean.isnew = isnew

        appEventBean
      })
    }).toDF()

    // Write the enriched events into the dwd partition for the processing date.
    finalResult.createTempView("res")
    spark.sql(
      s"""
        |
        |insert into table dwd.event_app_detail partition(dt='${args(1)}')
        |select
        |account         ,
        |appid           ,
        |appversion      ,
        |carrier         ,
        |deviceid        ,
        |devicetype      ,
        |eventid         ,
        |ip              ,
        |latitude        ,
        |longitude       ,
        |nettype         ,
        |osname          ,
        |osversion       ,
        |releasechannel  ,
        |resolution      ,
        |sessionid       ,
        |timestamp  as ts,
        |properties      ,
        |province        ,
        |city            ,
        |region          ,
        |isnew           ,
        |guid
        |from res
        |
        |""".stripMargin)

    spark.close()
  }
}
