package com.itcj.dmp.etl

import ch.hsr.geohash.GeoHash
import com.maxmind.geoip.{Location, LookupService}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{DoubleType, StringType, StructType}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.lionsoul.ip2region.{DbConfig, DbSearcher}


/* Pipeline:
 * - Read the pmt.json dataset
 * - Convert each IP address to longitude/latitude and province/city
 * - Design the Kudu table schema and create the Kudu table
 * - Store the enriched rows into the Kudu table
 */
object Ip2Region {

  /**
   * Enriches the source dataset with geolocation columns derived from its "ip" column.
   *
   * Appends five columns — region, city, longitude, latitude, geoHash — whose values
   * are produced per partition by [[iter2region]].
   *
   * @param sourceDf input dataset; every row must contain a String column named "ip"
   * @return a new DataFrame with the original columns plus the five derived columns
   */
  def transProcess(sourceDf: Dataset[Row]): DataFrame = {
    // Resolve each row's IP to province/city and coordinates. Done via
    // mapPartitions so the (non-serializable) lookup services are built
    // once per partition instead of once per row.
    val transSourceDf: RDD[Row] = sourceDf.rdd.mapPartitions(iter2region)

    // Extend the original schema with the derived columns. The order here
    // must match the order in which iter2region appends values to each row.
    val schema: StructType = sourceDf.schema
      .add("region", StringType)
      .add("city", StringType)
      .add("longitude", DoubleType)
      .add("latitude", DoubleType)
      .add("geoHash", StringType)

    sourceDf.sparkSession.createDataFrame(transSourceDf, schema)
  }

  /**
   * Maps one partition of rows, appending geolocation data resolved from each
   * row's "ip" column: province, city, longitude, latitude and an 8-character
   * geohash, in that order.
   *
   * @param iterable rows of a single partition; each row must have a String "ip" column
   * @return rows with the five derived values appended to the original columns
   */
  def iter2region(iterable: Iterator[Row]): Iterator[Row] = {
    // Build one searcher per partition: DbSearcher / LookupService are created
    // on the executor because they are not serializable. The dataset files must
    // be present on every executor's working directory.
    val dbSearcher = new DbSearcher(new DbConfig(), "dataset/ip2region.db")
    // IP -> latitude/longitude lookup, fully cached in memory.
    val lookupService = new LookupService("dataset/GeoLiteCity.dat", LookupService.GEOIP_MEMORY_CACHE)
    iterable.map { row =>
      val ip = row.getAs[String]("ip")
      // ip2region records look like "country|area|province|city|isp"; indices 2
      // and 3 are province and city. NOTE(review): this throws if a record has
      // fewer than 4 fields — confirm the db guarantees the 5-field layout.
      val regionArr: Array[String] = dbSearcher.btreeSearch(ip).getRegion.split("\\|")
      // Resolve coordinates for the same IP.
      val location: Location = lookupService.getLocation(ip)
      val long: Double = location.longitude.toDouble
      val lat: Double = location.latitude.toDouble
      // 8-character geohash of the resolved point (GeoHash expects lat first).
      val geohash: String = GeoHash.geoHashStringWithCharacterPrecision(lat, long, 8)

      // Append the derived values in the exact order of the schema built in transProcess.
      Row.fromSeq(row.toSeq :+ regionArr(2) :+ regionArr(3) :+ long :+ lat :+ geohash)
    }
  }
}
