package doit20.datayi.etl

import java.io.{File, FileInputStream}
import java.util
import java.util.{Date, Properties, UUID}

import ch.hsr.geohash.GeoHash
import com.alibaba.fastjson.JSON
import com.google.gson.Gson
import doit20.datayi.pojo.ApplogBean
import org.apache.commons.io.FileUtils
import org.apache.commons.lang.StringUtils
import org.apache.commons.lang.time.DateUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, Dataset, Row, SaveMode, SparkSession}
import org.lionsoul.ip2region.{DbConfig, DbSearcher}
import redis.clients.jedis.Jedis

import scala.collection.immutable

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-04-16
 * @desc app日志数据从ODS处理到DWD
 */
object ApplogODS2DWD {

  /**
   * Entry point: processes one day of raw app-event logs from the ODS layer to the DWD layer.
   *
   * Pipeline stages:
   *   1. Parse raw JSON lines into ApplogBean records (bad lines are dropped).
   *   2. Clean/filter: drop records missing key fields or outside the day's time window.
   *   3. Session split: cut raw sessions on gaps of more than 30 minutes.
   *   4. Geo enrichment: resolve province/city/region from a geohash dictionary, falling
   *      back to an ip2region lookup.
   *   5. Identity: back-fill anonymous accounts from the device-account bind table, then
   *      resolve a global unique id (guid) and new-visitor flag via Redis.
   *   6. Append the result to Hive table `doit20dwd.app_event_detail`, partitioned by dt.
   *
   * args(0): the processing date, formatted "yyyy-MM-dd".
   */
  def main(args: Array[String]): Unit = {
    if (args.length < 1) {
      System.err.println(
        """
          |
          |usage:
          |  args(0): 计算日期
          |
          |""".stripMargin)
      sys.exit(1)
    }

    val dt = args(0)

    val spark = SparkSession.builder()
      .appName("app日志数据从ODS处理到DWD")
      //.master("local")
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    /*
     * Stage 1: parse raw JSON lines and flatten them into ApplogBean records.
     * A line that fails to parse yields None and is filtered out downstream.
     */
    val applog: Dataset[String] = spark.read.textFile(s"/user/hive/warehouse/doit20ods.db/app_event_log/dt=${dt}")

    val beanDataset: Dataset[Option[ApplogBean]] = applog.map(json => {

      var maybeBean: Option[ApplogBean] = Option.empty

      try {
        val obj = JSON.parseObject(json)
        val account = obj.getString("account")
        val appId = obj.getString("appId")
        val appVersion = obj.getString("appVersion")
        val carrier = obj.getString("carrier")
        val deviceId = obj.getString("deviceId")
        val deviceType = obj.getString("deviceType")
        val eventId = obj.getString("eventId")
        val ip = obj.getString("ip")
        val latitude = obj.getDouble("latitude")
        val longitude = obj.getDouble("longitude")
        val netType = obj.getString("netType")
        val osName = obj.getString("osName")
        val osVersion = obj.getString("osVersion")

        // The nested "properties" object becomes a plain Map[String, String].
        val propertiesObj = obj.getJSONObject("properties")
        val map1: util.Map[String, AnyRef] = propertiesObj.getInnerMap
        import scala.collection.JavaConverters._
        val properties: Map[String, String] = map1.asScala.mapValues(v => v.toString).toMap

        val releaseChannel = obj.getString("releaseChannel")
        val resolution = obj.getString("resolution")
        val sessionId = obj.getString("sessionId")
        val timeStamp = obj.getLong("timeStamp")

        maybeBean = Some(ApplogBean(account, appId, appVersion, carrier, deviceId,
          deviceType, eventId, ip, latitude, longitude, netType, osName, osVersion, properties,
          releaseChannel, resolution, sessionId, timeStamp))

      } catch {
        // Best-effort: log and keep the job alive on malformed lines.
        case e: Exception => e.printStackTrace()
      }

      maybeBean
    })

    /*
     * Stage 2: clean and filter.
     * Valid event-time window for the processing day is the half-open range [dayStart, dayEnd).
     * Computed once on the driver instead of re-parsing the date for every record.
     */
    val dayStart = DateUtils.parseDate(dt, Array("yyyy-MM-dd")).getTime
    val dayEnd = dayStart + 24L * 60 * 60 * 1000

    val filtered: Dataset[ApplogBean] = beanDataset
      .filter(maybeBean => maybeBean.isDefined) // drop records whose JSON failed to parse
      .map(maybeBean => maybeBean.get)
      .filter(bean => {
        // Drop records missing any key field, or stamped outside the processing day.
        // NOTE: the upper bound is now exclusive (>= dayEnd); the original `> end`
        // admitted a record stamped exactly at next-day midnight.
        bean.sessionId != null && bean.eventId != null && bean.deviceId != null && bean.properties != null &&
          bean.timeStamp >= dayStart && bean.timeStamp < dayEnd
      })

    /**
     * 数据规范化 TODO
     */

    /*
     * Stage 3: session split.
     * Within one raw sessionId, events sorted by timestamp are assigned a fresh
     * UUID session; a gap of more than 30 minutes to the next event starts a new one.
     *
     * d01,u01,event01,props1,t11,session01,news01
     * d01,u01,event01,props1,t12,session01,news01
     * d01,u01,event01,props1,t15,session01,news01
     * d01,u01,event01,props1,t20,session01,news02
     * d01,u01,event01,props1,t21,session01,news02
     * d01,u01,event01,props1,t24,session01,news02
     */
    val sessionSplitedRdd: RDD[ApplogBean] = filtered.rdd
      .groupBy(bean => bean.sessionId)
      .mapValues(iter => {
        val sortedBeans = iter.toList.sortBy(_.timeStamp)

        var newSessionId = UUID.randomUUID().toString

        // BUGFIX: the original loop ran `0 until size - 1`, silently dropping the
        // LAST event of every raw session. Iterate over every element; the gap
        // check only fires when a next element exists.
        for (i <- sortedBeans.indices) yield {
          sortedBeans(i).newsession = newSessionId
          if (i + 1 < sortedBeans.size &&
              sortedBeans(i + 1).timeStamp - sortedBeans(i).timeStamp > 30 * 60 * 1000) {
            newSessionId = UUID.randomUUID().toString
          }
          sortedBeans(i)
        }
      })
      .flatMap(_._2)

    /*
     * Stage 4: geo enrichment.
     * Load and broadcast the geohash->area dictionary from MySQL.
     */
    val props = new Properties()
    props.setProperty("user", "root")
    props.setProperty("password", "ABC123abc.123")
    val areaTable = spark.read.jdbc("jdbc:mysql://hdp01:3306/abc", "area_flat", props)
    val kvArea: RDD[(String, (String, String, String))] = areaTable.rdd.map({
      case Row(province: String, city: String, region: String, lng: Double, lat: Double, geohash: String)
             => (geohash, (province, city, region))
      case _ => ("", ("", "", ""))
    })
    val bc1 = spark.sparkContext.broadcast(kvArea.collectAsMap())

    // Read and broadcast the ip2region database. The relative path assumes the
    // file is shipped with the job (e.g. spark-submit --files ip2region.db).
    val ipDbBytes = FileUtils.readFileToByteArray(new File("ip2region.db"))
    val bc2 = spark.sparkContext.broadcast(ipDbBytes)

    // mapPartitions instead of map so the DbSearcher is built once per partition
    // rather than once per record (same per-record logic, far less garbage).
    val areaBeanRdd: RDD[ApplogBean] = sessionSplitedRdd.mapPartitions(iter => {
      val geoDict: collection.Map[String, (String, String, String)] = bc1.value
      val searcher = new DbSearcher(new DbConfig(), bc2.value)

      iter.map(bean => {
        var province: String = null
        var city: String = null
        var region: String = null

        // Encode the record's GPS coordinates to a 5-char geohash and try the
        // dictionary first; fall back to the ip2region lookup on a miss.
        val geoHashCode = GeoHash.geoHashStringWithCharacterPrecision(bean.latitude, bean.longitude, 5)

        geoDict.get(geoHashCode) match {
          case Some((p, c, r)) =>
            province = p
            city = c
            region = r
          case None =>
            val regionInfo = searcher.memorySearch(bean.ip).getRegion
            // ip2region returns "country|area|province|city|isp"; a private (内网)
            // address carries no usable location. "0" marks an unknown segment.
            if (!regionInfo.contains("内网")) {
              val split = regionInfo.split("\\|")
              province = if (split(2).equals("0")) null else split(2)
              city = if (split(3).equals("0")) null else split(3)
            }
        }

        bean.province = province
        bean.city = city
        bean.region = region
        bean
      })
    })

    /*
     * Stage 5a: back-fill anonymous accounts.
     * From the device-account bind-score table, pick each device's highest-scoring account.
     */
    val bindScoreTable: DataFrame = spark.read.table("doit20dws.device_account_bind").where(s"dt='${dt}'")

    // FIX: `'score desc` was postfix-operator notation (requires language.postfixOps);
    // use the explicit method call form instead.
    val spec = Window.partitionBy('device_id).orderBy('score.desc, 'last_access)
    val bindAccount = bindScoreTable
      .where("account is not null and trim(account)!=''")
      .select('device_id, 'account, row_number().over(spec) as "rn")
      .where("rn=1")

    // Join the log beans against the bind table to fill in missing accounts.
    bindAccount.createTempView("bindAccount")
    areaBeanRdd.toDS().createTempView("beanlog")
    val bindAccountDs = spark.sql(
      s"""
        |select
        |   a.account,
        |   a.appId           ,
        |   a.appVersion      ,
        |   a.carrier         ,
        |   a.deviceId        ,
        |   a.deviceType      ,
        |   a.eventId         ,
        |   a.ip              ,
        |   a.latitude        ,
        |   a.longitude       ,
        |   a.netType         ,
        |   a.osName          ,
        |   a.osVersion       ,
        |   a.properties      ,
        |   a.releaseChannel  ,
        |   a.resolution      ,
        |   a.sessionId       ,
        |   a.timeStamp       ,
        |   a.newsession      ,
        |   a.province        ,
        |   a.city            ,
        |   a.region          ,
        |   a.isnew           ,
        |   a.guid            ,
        |   if((trim(a.account) ='' or a.account is null)  and b.account is not null,b.account,a.account) as bindAccount,
        |   '${dt}' as dt
        |
        |from beanlog a left join bindAccount b
        |on a.deviceId = b.device_id
        |
        |""".stripMargin).as[ApplogBean]

    //bindAccountDs.where("(account is null or trim(account)='') and (bindAccount is not null and trim(bindAccount) !='')").show(50,false)

    /*
     * Stage 5b: resolve the global unique id (guid) and new-visitor flag from Redis.
     * Redis holds account->guid and deviceId->guid mappings plus a "max_guid" counter.
     */
    val guidDataset: Dataset[ApplogBean] = bindAccountDs.mapPartitions(iter => {
      val jedis = new Jedis("hdp02", 6379)

      val res: Iterator[ApplogBean] = iter.map(bean => {
        val account = bean.bindAccount
        val deviceId = bean.deviceId

        var guidStr: String = null

        // If this record carries an account, look it up first.
        if (StringUtils.isNotBlank(account)) {
          guidStr = jedis.get(account)

          // Miss on account: fall back to the device id.
          if (StringUtils.isBlank(guidStr)) {
            guidStr = jedis.get(deviceId)

            if (StringUtils.isNotBlank(guidStr)) {
              // The device had a guid but the account did not: migrate the
              // binding from (device -> guid) to (account -> guid).
              jedis.set(account, guidStr)
              jedis.del(deviceId)
            } else {
              // Never seen before: mint a new guid and mark as new visitor.
              guidStr = jedis.incr("max_guid") + ""
              jedis.set(account, guidStr)
              bean.isnew = 1
            }
          }

        } else {
          // No account on this record: resolve by device id only.
          guidStr = jedis.get(deviceId)

          if (StringUtils.isBlank(guidStr)) {
            // First appearance of this device with no bound account.
            guidStr = jedis.incr("max_guid") + ""
            jedis.set(deviceId, guidStr)
            bean.isnew = 1
          }
        }

        bean.guid = guidStr.toLong
        bean
      })

      // BUGFIX: the original called jedis.close() right here, BEFORE the lazy
      // iterator above was consumed, so every Redis call ran against a closed
      // client. Iterator.++ takes its argument by name, so appending this empty
      // iterator defers the close until `res` has been fully exhausted.
      res ++ {
        jedis.close()
        Iterator.empty
      }
    })

    /*
     * Stage 6: append to the DWD detail table, partitioned by dt.
     */
    guidDataset.write.mode(SaveMode.Append).partitionBy("dt").saveAsTable("doit20dwd.app_event_detail")

    spark.close()
  }

}
