package com.doit.pre

import ch.hsr.geohash.GeoHash
import com.alibaba.fastjson.JSON
import org.apache.calcite.avatica.org.apache.http.client.methods.HttpGet
import org.apache.calcite.avatica.org.apache.http.impl.client.HttpClients
import org.apache.calcite.avatica.org.apache.http.util.EntityUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SaveMode, SparkSession}

import java.sql.DriverManager
import java.util.Properties

/**
 * @Date: 23.6.5
 * @Author: Hang.Nian.YY
 * @qq: 598196583
 * @Tips: Big-data course material (Duoyi Education)
 * @Description:
 * Integrate geographic location into the app event log:
 * 1) derive a geohash key from each record's latitude/longitude,
 * 2) resolve province/city/district from a broadcast MySQL dictionary,
 *    falling back to the Baidu reverse-geocoding API on a miss (and
 *    enriching the MySQL dictionary with the API result),
 * 3) write the enriched records to a Hive table partitioned by dt.
 */
object IntegratedGeographicLocation {
  def main(args: Array[String]): Unit = {
    // Run HDFS operations as root (required by this cluster's permission model).
    System.setProperty("HADOOP_USER_NAME", "root")
    val session = SparkSession.builder()
      .master("local[*]")
      .appName(this.getClass.getSimpleName)
      .enableHiveSupport() // lets Spark talk to the Hive metastore directly
      .getOrCreate()

    /**
     * Precondition: load the local geo dictionary (geohash -> admin areas)
     * from MySQL, collect it into a Map and broadcast it to all executors.
     */
    val url = "jdbc:mysql://localhost:3306/doe39?characterEncoding=UTF8"
    val table = "tb_areas_dict"
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "root")

    val mysqlDF = session.read.jdbc(url, table, properties)
    val mapData = mysqlDF.rdd.map(row => {
      val geoKey = row.getAs[String]("geo_key")
      val province = row.getAs[String]("province")
      val city = row.getAs[String]("city")
      val area = row.getAs[String]("area")
      (geoKey, (province, city, area))
    }).collectAsMap()
    // Broadcast the dictionary so every task shares one read-only copy.
    val bc = session.sparkContext.broadcast(mapData)

    /**
     * Step 1: filter ODS rows whose core fields (sessionid / eventid)
     * are missing or blank.
     */
    val frame = session.sql(
      """
        |select
        |*
        |from
        |doe39.ods_app_event_log
        |where dt = '2023-06-02'
        |and sessionid is not null  and  trim(sessionid) != ''
        |and  eventid is not null  and  trim(eventid) != ''
        |""".stripMargin)

    /**
     * Step 2: parse each record, geohash its coordinates and attach the
     * administrative area. RDD programming gives row-level flexibility.
     */
    val res: RDD[LogBean] = frame.rdd.mapPartitions(iters => { // once per partition
      // One JDBC connection, one prepared statement and one HTTP client per
      // partition (NOT per row) — previously the HTTP client was created per
      // row and nothing was ever closed (resource leak).
      val conn = DriverManager.getConnection(url, "root", "root")
      val ps = conn.prepareStatement("insert  into  tb_areas_dict values (?,?,?,?,?,?)")
      val client = HttpClients.createDefault()
      // Per-partition cache layered over the broadcast dictionary so the same
      // geohash triggers at most one remote API call per partition.
      val localCache = scala.collection.mutable.Map[String, (String, String, String)]()
      try {
        iters.map { row =>
          val account = row.getAs[String]("account")
          val appid = row.getAs[String]("appid")
          val carrier = row.getAs[String]("carrier")
          val deviceid = row.getAs[String]("deviceid")
          val devicetype = row.getAs[String]("devicetype")
          val eventid = row.getAs[String]("eventid")
          val ip = row.getAs[String]("ip")
          val latitude = row.getAs[Double]("latitude")
          val longitude = row.getAs[Double]("longitude")
          val nettype = row.getAs[String]("nettype")
          val osname = row.getAs[String]("osname")
          val osversion = row.getAs[String]("osversion")
          val properties = row.getAs[Map[String, String]]("properties")
          val releasechannel = row.getAs[String]("releasechannel")
          val resolution = row.getAs[String]("resolution")
          val sessionid = row.getAs[String]("sessionid")
          val timestamp = row.getAs[Long]("timestamp")
          val dt = row.getAs[String]("dt")

          // 6-character geohash key used to look up the area dictionary.
          val geoKey = GeoHash.withCharacterPrecision(latitude, longitude, 6).toBase32
          // Broadcast dictionary first, then the partition-local cache.
          val hit = bc.value.get(geoKey).orElse(localCache.get(geoKey))
          val (province, city, district) = hit match {
            case Some(areas) => areas // dictionary hit
            case None =>
              // Dictionary miss: resolve via the Baidu reverse-geocoding API.
              // NOTE(review): the API key is hard-coded in source — move it to
              // configuration / a secret store.
              val httpGet = new HttpGet(s"https://api.map.baidu.com/reverse_geocoding/v3/?ak=18i6fQLQURn2RRPM8XSO1qH65OS2MXRD&output=json&coordtype=wgs84ll&location=$latitude,$longitude")
              val response = client.execute(httpGet)
              // Consume the body, then always release the response.
              val contentStr =
                try EntityUtils.toString(response.getEntity)
                finally response.close()
              val addressObject = JSON.parseObject(contentStr)
                .getJSONObject("result")
                .getJSONObject("addressComponent")
              val p = addressObject.getString("province")
              val c = addressObject.getString("city")
              val d = addressObject.getString("district")
              // Enrich the MySQL dictionary so future runs hit the broadcast map.
              ps.setString(1, geoKey)
              ps.setString(2, p)
              ps.setString(3, c)
              ps.setString(4, d)
              ps.setDouble(5, latitude)
              ps.setDouble(6, longitude)
              ps.execute()
              localCache.put(geoKey, (p, c, d))
              (p, c, d)
          }
          LogBean(account, appid, carrier, deviceid, devicetype, eventid, ip, latitude, longitude, nettype, osname, osversion, properties, releasechannel, resolution, sessionid, timestamp, dt, province, city, district)
        }.toList.iterator // materialize before the finally closes the resources (iters.map is lazy)
      } finally {
        // Close per-partition resources exactly once.
        ps.close()
        conn.close()
        client.close()
      }
    })

    import session.implicits._
    // Write the integrated data to a Hive staging table with normalized
    // column names (snake_case) matching the warehouse convention.
    val resDF = res.toDF(
      "account",
      "app_id",
      "carrier",
      "device_id",
      "device_type",
      "event_id",
      "ip",
      "latitude",
      "longitude",
      "net_type",
      "os_name",
      "os_version",
      "properties",
      "release_channel",
      "resolution",
      "session_id",
      "ts",
      "dt",
      "province",
      "city",
      "district"
    )
    resDF.write.format("orc").mode(SaveMode.Overwrite).partitionBy("dt").saveAsTable("doe39.pre_app_log_with_areas")
    session.close()
  }

}
