package com.wzk.dwetl

import java.text.SimpleDateFormat
import java.util.UUID



import ch.hsr.geohash.GeoHash
import com.wzk.dwetl.beans.AppLogBean
import com.wzk.dwetl.utils.Row2AppLogBean
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FileStatus, FileSystem, Path}
import org.apache.spark.SparkFiles
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.lionsoul.ip2region.{DbConfig, DbSearcher}

/**
 * @author 康哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-01-14
 * @desc Processes ODS-layer app behavior logs into the DWD detail table:
 *       1. cleans records with missing required fields or out-of-range timestamps
 *       2. re-splits sessions on gaps longer than 30 minutes
 *       3. resolves geo location (GPS geohash dictionary first, ip2region fallback)
 *       4. assigns a global user id (guid) via the device→account binding dictionary
 *       5. flags new users against the previous day's known id set
 *       The target partition date defaults to 2021-01-11 and may be overridden
 *       with args(0) (format yyyy-MM-dd).
 */
object EventApp2DwdTable {
  def main(args: Array[String]): Unit = {
    import java.time.LocalDate

    // Target partition date; args(0) overrides the historical default, so the
    // job stays backward-compatible when launched with no arguments.
    val dt = if (args.nonEmpty) args(0) else "2021-01-11"
    val prevDt = LocalDate.parse(dt).minusDays(1).toString
    val nextDt = LocalDate.parse(dt).plusDays(1).toString

    val spark = SparkSession.builder()
      //.master("local[*]")
      .appName(this.getClass.getSimpleName)
      .enableHiveSupport()
      .getOrCreate()

    import spark.implicits._

    /**
     * Load the dictionaries and broadcast them to the executors.
     */
    // geohash(5 chars) -> (province, city, district)
    val geodf = spark.read.parquet("/geo/")
    val geomap = geodf.rdd.map(row => {
      val geohash = row.getAs[String]("geo")
      val province = row.getAs[String]("province")
      val city = row.getAs[String]("city")
      val dist = row.getAs[String]("dist")
      (geohash, (province, city, dist))
    }).collectAsMap()
    val bc1 = spark.sparkContext.broadcast(geomap)

    // Read the ip2region db file fully into a byte buffer so each executor can
    // build an in-memory DbSearcher from the broadcast bytes.
    val fs = FileSystem.get(new Configuration())
    val path = new Path("/dicts/region/ip2region.db")
    val len = fs.getFileStatus(path).getLen
    val buffer = new Array[Byte](len.toInt)
    val in: FSDataInputStream = fs.open(path)
    try {
      in.readFully(0, buffer)
    } finally {
      in.close() // FIX: the stream was previously never closed (resource leak)
    }
    val bc2 = spark.sparkContext.broadcast(buffer)

    // device -> best-bound account (highest score, then most recent activity)
    val relation = spark.sql(
      s"""
        |select
        | deviceid,
        | account
        |from
        |(
        |   select
        |    deviceid,
        |    account,
        |    row_number() over(partition by deviceid order by score desc,last_time desc) as rn
        |   from
        |    dwd.device_account_relation
        |   where dt='$dt'
        |) o
        |where rn = 1
        |""".stripMargin)

    val relationMap = relation.rdd.map(row => {
      val deviceid = row.getAs[String]("deviceid")
      val account = row.getAs[String]("account")
      (deviceid, account)
    }).collectAsMap()
    val bc3 = spark.sparkContext.broadcast(relationMap)

    // All device ids and account ids already seen up to the PREVIOUS day:
    // anything present here is not a new user today.
    val ids = spark.read.table("dwd.device_account_relation")
      .where(s"dt='$prevDt'")
      .selectExpr("explode (array(deviceid,account)) as id")
      .map(row => row.getAs[String]("id")).collect().toSet
    val bc4 = spark.sparkContext.broadcast(ids)

    val ods = spark.read.table("ods.event_app_log").where(s"dt='$dt'")
    val beanRdd = ods.rdd.map(row => Row2AppLogBean.row2AppLogBean(row))

    // Validity window [dt 00:00, dt+1 00:00) computed ONCE on the driver.
    // FIX: a SimpleDateFormat was previously constructed and two dates parsed
    // for every single record inside the filter closure.
    val sdf = new SimpleDateFormat("yyyy-MM-dd")
    val validStart = sdf.parse(dt).getTime
    val validEnd = sdf.parse(nextDt).getTime

    // Keep only records that have all required fields AND fall inside the window.
    // FIX: the original condition `!isNotBlank(deviceid) && properties != null &&
    // isNotBlank(eventid) && isNotBlank(sessionid)` only rejected records whose
    // deviceid was blank while every OTHER field was valid — records with a blank
    // eventid/sessionid or null properties slipped through the filter.
    val filtered: RDD[AppLogBean] = beanRdd.filter(bean => {
      val fieldsOk = StringUtils.isNotBlank(bean.deviceid) &&
        bean.properties != null &&
        StringUtils.isNotBlank(bean.eventid) &&
        StringUtils.isNotBlank(bean.sessionid)
      fieldsOk && bean.timestamp >= validStart && bean.timestamp < validEnd
    })

    // Session re-splitting: within one reported sessionid, order events by time
    // and start a fresh newsessionid whenever the gap to the next event exceeds
    // 30 minutes.
    val sessionSplitted: RDD[AppLogBean] = filtered.groupBy(bean => {
      bean.sessionid
    }).flatMapValues(iter => {
      val sortedEvents = iter.toList.sortBy(bean => bean.timestamp)
      var tmpsessionid = UUID.randomUUID().toString
      for (i <- 0 until sortedEvents.size) {
        sortedEvents(i).newsessionid = tmpsessionid
        // gap between this event and the NEXT one decides whether the next
        // event opens a new session
        if (i < sortedEvents.size - 1 && sortedEvents(i + 1).timestamp - sortedEvents(i).timestamp > 30 * 60 * 1000) {
          tmpsessionid = UUID.randomUUID().toString
        }
      }
      sortedEvents
    }).map(_._2)

    // Geo enrichment: GPS geohash dictionary first; fall back to ip2region when
    // the GPS lookup yields nothing.
    val aread: RDD[AppLogBean] = sessionSplitted.mapPartitions(iter => {
      val geoDict: collection.Map[String, (String, String, String)] = bc1.value
      val ip2Region: Array[Byte] = bc2.value

      // One searcher per partition, built from the broadcast bytes (memory mode).
      val searcher = new DbSearcher(new DbConfig(), ip2Region)
      iter.map(bean => {
        var country: String = "UNKNOWN"
        var province: String = "UNKNOWN"
        var city: String = "UNKNOWN"
        var region: String = "UNKNOWN"
        try {
          val geo = GeoHash.geoHashStringWithCharacterPrecision(bean.latitude, bean.longitude, 5)
          val area = geoDict.getOrElse(geo, ("UNKNOWN", "UNKNOWN", "UNKNOWN"))
          country = "CN"
          province = area._1
          city = area._2
          region = area._3 // FIX: was `city = area._3`, clobbering city and leaving region UNKNOWN
        } catch {
          case e: Exception => e.printStackTrace()
        }

        if ("UNKNOWN".equals(province)) {
          try {
            // FIX: memorySearch moved INSIDE the try — it throws on malformed
            // IPs and previously killed the task.
            val block = searcher.memorySearch(bean.ip)
            // ip2region record format: country|area|province|city|isp
            val split = block.getRegion.split("\\|")
            country = split(0)
            province = split(2)
            city = split(3)
          } catch {
            case e: Exception => e.printStackTrace()
          }
        }
        bean.country = country
        bean.province = province
        bean.city = city
        bean.region = region
        bean
      })
    })

    // Global user id: prefer the event's own account; otherwise the account
    // bound to the device; otherwise fall back to the deviceid itself.
    val guided = aread.mapPartitions(iter => {
      val deviceBindAccountDict = bc3.value
      iter.map(bean => {
        bean.guid =
          if (StringUtils.isNotBlank(bean.account)) bean.account
          else deviceBindAccountDict.get(bean.deviceid).filter(_ != null).getOrElse(bean.deviceid)
        bean
      })
    })

    // New-user flag: "0" when either id was already known the previous day.
    val result = guided.mapPartitions(iter => {
      val idSet = bc4.value
      iter.map(bean => {
        bean.isnew = if (idSet.contains(bean.deviceid) || idSet.contains(bean.account)) "0" else "1"
        bean
      })
    }).toDF()
    result.createTempView("result")
    spark.sql(
      s"""
        |
        |insert into table dwd.event_app_detail partition(dt='$dt')
        |select
        |account            ,
        |appid              ,
        |appversion         ,
        |carrier            ,
        |deviceid           ,
        |devicetype         ,
        |eventid            ,
        |ip                 ,
        |latitude           ,
        |longitude          ,
        |nettype            ,
        |osname             ,
        |osversion          ,
        |properties         ,
        |releasechannel     ,
        |resolution         ,
        |sessionid          ,
        |timestamp          ,
        |newsessionid       ,
        |country            ,
        |province           ,
        |city               ,
        |region             ,
        |guid               ,
        |isnew
        |
        |from result
        |
        |""".stripMargin)

    spark.close()
  }
}
