package cn.doitedu.dw_etl.logetl

import ch.hsr.geohash.GeoHash
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FileSystem, Path}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.lionsoul.ip2region.{DbConfig, DbSearcher}

/**
 * ODS APP日志抽取到DWD任务
 *
 * 所有代码集中在一个类中的版本
 *
 */
object OdsAppLog2DwdWhole {

  /**
   * Entry point: extracts one day's partition of the ODS app-event log,
   * filters malformed records, enriches each event with a geo dimension
   * (GPS-geohash first, IP fallback) and a global user id (GUID), as the
   * first stage of the ODS -> DWD pipeline.
   *
   * Remaining TODOs from the original version (session split, crawler
   * filter, new/returning-visitor flag, final DWD write) are kept as TODOs.
   */
  def main(args: Array[String]): Unit = {
    import cn.doitedu.commons.utils.CustomUdfs._

    val spark = SparkSession.builder()
      .appName("ODS APP日志抽取到DWD任务")
      .master("local[*]")
      .enableHiveSupport() // allow reading the ODS Hive table below
      .getOrCreate()

    // Load the ODS-layer partition for the processing date
    val ods_app = spark.read.table("ODS.EVENT_APP_LOG").where("dt='2020-08-31'")

    spark.udf.register("isblank", isblank)

    /**
     * Filtering:
     *  - drop records with neither deviceid nor account (no identity at all)
     *  - drop records missing properties / eventid / sessionid
     *  - drop records whose event time falls outside the processing date
     */
    val filterd = ods_app
      .where("!(isblank(deviceid) and isblank(account))")
      .where("properties is not null and !isblank(eventid) and !isblank(sessionid)")
      .where("to_date(from_unixtime(cast(timestamp/1000 as bigint))) = '2020-08-31'")
    // TODO crawler/bot filtering

    filterd.show(50, false)

    /**
     * TODO session splitting
     */

    /**
     * Geo dimension integration
     */
    // Load the geohash->area dictionary; columns: "geo","province","city","region"
    val areadictDF: DataFrame = spark.read.parquet("/dict_data/geohash_area_dict/")
    // Turn the parquet dictionary into a kv map and collect it to the driver
    val areaDictMap = areadictDF.rdd.map(row => {
      val geo = row.getAs[String]("geo")
      val province = row.getAs[String]("province")
      val city = row.getAs[String]("city")
      val region = row.getAs[String]("region")
      (geo, (province, city, region))
    }).collectAsMap()
    // Broadcast to executors
    val bc1 = spark.sparkContext.broadcast(areaDictMap)


    // Read the ip2region database file from HDFS into memory.
    // NOTE: read(bytes) is not guaranteed to fill the buffer in one call and the
    // original leaked the stream — use readFully and close it in a finally block.
    val conf = new Configuration()
    val fs = FileSystem.get(conf)
    val dbPath = new Path("/dict_data/ip_area_dict/ip2region.db")
    val len: Long = fs.getFileStatus(dbPath).getLen
    val bytes = new Array[Byte](len.toInt)
    val fsin: FSDataInputStream = fs.open(dbPath)
    try {
      fsin.readFully(0, bytes)
    } finally {
      fsin.close()
    }
    val bc2 = spark.sparkContext.broadcast(bytes)


    // Load today's freshly generated id-mapping dictionary (deviceid -> guid)
    val idmapDictDF = spark.read.json("/dict_data/idmp_dict/2020-08-31")
    val idmpMap = idmapDictDF
      .select("deviceid", "guid")
      .where("!isblank(deviceid)")
      .rdd
      .map(row => {
        val deviceid = row.getAs[String]("deviceid")
        val guid = row.getAs[String]("guid")
        (deviceid, guid)
      })
      .collectAsMap()
    val bc3 = spark.sparkContext.broadcast(idmpMap)


    // mapPartitions so the DbSearcher (and its parse of the db bytes) is built
    // once per partition instead of once per record, as in the original map().
    val enriched = filterd.rdd.mapPartitions(iter => {

      val idmpDict = bc3.value
      val areaDict: collection.Map[String, (String, String, String)] = bc1.value
      val dbSearcher = new DbSearcher(new DbConfig(), bc2.value)

      iter.map(row => {

        val account = row.getAs[String]("account")
        val appid = row.getAs[String]("appid")
        val appversion = row.getAs[String]("appversion")
        val carrier = row.getAs[String]("carrier")
        val deviceid = row.getAs[String]("deviceid")
        val devicetype = row.getAs[String]("devicetype")
        val eventid = row.getAs[String]("eventid")
        val ip = row.getAs[String]("ip")
        val nettype = row.getAs[String]("nettype")
        val osname = row.getAs[String]("osname")
        val osversion = row.getAs[String]("osversion")
        val releasechannel = row.getAs[String]("releasechannel")
        val resolution = row.getAs[String]("resolution")
        val sessionid = row.getAs[String]("sessionid")
        val timestamp = row.getAs[Long]("timestamp")
        val properties = row.getAs[Map[String, String]]("properties")

        var province = ""
        var city = ""
        var region = ""

        // 1) Try the GPS coordinates against the geohash dictionary.
        // getAs[Double] throws NPE on a null column and `double != null` is
        // always true, so null-check via isNullAt before unboxing.
        val latIdx = row.fieldIndex("latitude")
        val lonIdx = row.fieldIndex("longitude")
        if (!row.isNullAt(latIdx) && !row.isNullAt(lonIdx)) {
          val latitude = row.getDouble(latIdx)
          val longitude = row.getDouble(lonIdx)
          if (latitude > -90 && latitude < 90 && longitude > -180 && longitude < 180) {
            val geo = GeoHash.geoHashStringWithCharacterPrecision(latitude, longitude, 5)
            // Assign to the outer vars. The original declared a NEW shadowing
            // tuple val here, so the gps match result was silently discarded.
            val hit = areaDict.getOrElse(geo, ("", "", ""))
            province = hit._1
            city = hit._2
            region = hit._3
          }
        }

        // 2) If the GPS match failed, fall back to the IP address.
        // ip2region regions look like "country|area|province|city|isp".
        if (isblank(province + city + region) && !isblank(ip)) {
          val block = dbSearcher.memorySearch(ip)
          val areaInfo = block.getRegion.split("\\|")
          if (areaInfo.length > 3) {
            province = areaInfo(2)
            city = areaInfo(3)
          }
        }

        // Tag each record with its GUID: prefer the account; otherwise look the
        // deviceid up in the idmp dict. The original used Option.get, which
        // throws for any device absent from the dict — fall back to deviceid.
        val guid = if (isblank(account)) idmpDict.getOrElse(deviceid, deviceid) else account

        // TODO new/returning visitor flag; assemble and write the DWD record
        (guid, account, appid, appversion, carrier, deviceid, devicetype, eventid,
          ip, nettype, osname, osversion, releasechannel, resolution, sessionid,
          timestamp, properties, province, city, region)
      })
    })

    // TODO persist `enriched` to the DWD layer; show() forces evaluation for now
    // (the original map() was never acted on, so the enrichment never ran).

    spark.close()
  }
}
