package com.doit.dophin.etl

import ch.hsr.geohash.GeoHash
import com.doit.dophin.utils.{LogBean, Row2LogBean}
import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SparkSession}
import org.lionsoul.ip2region.{DbConfig, DbSearcher}

import java.net.URI
import java.util.Properties

import scala.util.control.NonFatal





/**
 * @DATE 2022/3/22/15:41
 * @Author MDK
 * @Version 2021.2.2
 *
 * hive中读取session切割后的数据
 * 拿出经纬度,变成geohash码在dim.ref_geo表中查询
 * 查不到再用ip地址查询地理位置信息,采用RDD
 *
 create table tmp.mall_applog_area(
    account              string
    ,app_id              string
    ,app_version         string
    ,carrier             string
    ,device_id           string
    ,device_type         string
    ,event_id            string
    ,ip                  string
    ,latitude            double
    ,longitude           double
    ,net_type            string
    ,os_name             string
    ,os_version          string
    ,properties          map<string,string>
    ,release_channel     string
    ,resolution          string
    ,session_id          string
    ,ts                  bigint
    ,new_session_id      string
    ,province            string
    ,city                string
    ,region              string
)
partitioned by (dt string)
stored as orc
tblproperties('orc.compress'='snappy');
 *
 * */
/**
 * One row of the geohash reference dictionary (dim.ref_geo).
 *
 * @param geohash  5-character geohash cell key
 * @param province province name for the cell
 * @param city     city name for the cell
 * @param region   district/county name for the cell
 */
final case class GeoArea(geohash: String, province: String, city: String, region: String)

object ApplogRegionProcess {

  /**
   * Entry point. Expects one argument: the partition date to process (e.g. 2022-03-24).
   *
   * Pipeline:
   *   1. load the geohash -> area reference table (dim.ref_geo) and broadcast it,
   *   2. read the ip2region binary dictionary from HDFS and broadcast its bytes,
   *   3. for every session-split log row, resolve province/city/region by geohash
   *      lookup first, falling back to an ip lookup,
   *   4. insert the enriched rows into tmp.mall_applog_area partition (dt).
   */
  def main(args: Array[String]): Unit = {

    // The usage text says "at least one" argument, so accept extras instead of
    // rejecting them (original used != 1, contradicting its own message).
    if (args.length < 1) {
      println(
        """
          |usage: 请至少输入一个指定日期的参数
          |  参数一:待处理的日期,如2022-03-24
          |""".stripMargin)
      sys.exit(1)
    }

    val dt: String = args(0)

    val spark = SparkSession.builder()
      .appName("地理位置信息集成")
      // NOTE(review): hard-coded local master; remove or make configurable before
      // cluster deployment, otherwise it overrides spark-submit's --master.
      .master("local")
      .enableHiveSupport()
      .getOrCreate()

    // Load the geohash -> area reference table (only geohash is guaranteed non-null).
    val geoRef = spark.read.table("dim.ref_geo").where("geohash is not null")

    // Collect the reference data to the driver keyed by geohash.
    // flatMap with an explicit fallback (instead of map with a partial function)
    // skips rows whose province/city/region is null rather than failing the
    // whole job with a MatchError.
    val areaMap: collection.Map[String, GeoArea] = geoRef.rdd.flatMap {
      case Row(geohash: String, province: String, city: String, region: String) =>
        Some(geohash -> GeoArea(geohash, province, city, region))
      case _ => None
    }.collectAsMap()

    // Broadcast the geohash dictionary to the executors.
    val bc1 = spark.sparkContext.broadcast(areaMap)

    // Load configuration to locate the HDFS namenode.
    val properties = new Properties()
    properties.load(ApplogRegionProcess.getClass.getClassLoader.getResourceAsStream("db.properties"))

    // Read the ip2region binary dictionary from HDFS into a byte array sized
    // from the file status, so it can be broadcast as plain bytes.
    val fs = FileSystem.get(new URI(properties.getProperty("hdfs.path") + "/ip2region/ip2region.db"), new Configuration(), "root")
    val path = new Path("/ip2region/ip2region.db")
    val status = fs.getFileStatus(path)
    val ip2RegionDbBytes = new Array[Byte](status.getLen.toInt)
    val fsDataInputStream = fs.open(path)
    try {
      IOUtils.readFully(fsDataInputStream, ip2RegionDbBytes)
    } finally {
      // Always release the HDFS stream, even when the read fails (original leaked it).
      fsDataInputStream.close()
    }

    // Broadcast the dictionary bytes; each executor builds its own searcher from them.
    val bc2 = spark.sparkContext.broadcast(ip2RegionDbBytes)

    // Session-split logs for the requested partition.
    val applog = spark.read.table("tmp.mall_applog_session_split").where(s"dt='${dt}'")

    // Enrich each row: geohash dictionary first, ip lookup as fallback.
    val areaedApplog: RDD[LogBean] = applog.rdd.mapPartitions(iter => {

      // Resolve broadcast values and build the ip searcher once per partition,
      // not once per record.
      val areaDict = bc1.value
      val searcher = new DbSearcher(new DbConfig(), bc2.value)

      iter.map(row => {
        var province = "未知"
        var city = "未知"
        var region = "未知"

        // Convert the Row into a mutable LogBean carrier object.
        val logBean = Row2LogBean.row2LogBean(row)

        // 5-character precision must match the precision used to build dim.ref_geo.
        val geoHashCode = GeoHash.geoHashStringWithCharacterPrecision(logBean.latitude, logBean.longitude, 5)
        areaDict.get(geoHashCode) match {
          case Some(area) =>
            province = area.province
            city = area.city
            region = area.region
          case None =>
            // Fall back to ip lookup; guard against malformed/null ips so one
            // bad record cannot fail the whole task — the "未知" defaults stand.
            try {
              // ip2region region format: country|area|province|city|isp
              val splits = searcher.memorySearch(logBean.ip).getRegion.split("\\|")
              if (splits.length >= 5 && !splits(3).equals("内网IP")) {
                province = splits(2)
                city = splits(3)
              }
            } catch {
              case NonFatal(_) => // keep defaults
            }
        }

        // Attach the resolved area to the bean and emit it.
        logBean.province = province
        logBean.city = city
        logBean.region = region

        logBean
      })
    })

    // Insert the enriched rows into the target partition.
    // NOTE(review): "select *" relies on LogBean's field order matching the
    // target table's column order — confirm whenever either side changes.
    import spark.implicits._
    areaedApplog.toDS().createTempView("tmp")
    spark.sql(
      s"""
        |
        |insert into table tmp.mall_applog_area partition(dt='${dt}')
        |select * from tmp
        |
        |""".stripMargin)

    spark.close()
  }
}
