package com.yinanxi.cool

import java.sql.DriverManager
import java.util.Properties
import ch.hsr.geohash.GeoHash
import com.alibaba.fastjson.JSON
import com.yinanxi.cool.beans.LogBean
import com.yinanxi.cool.beans.LogBean2
import org.apache.http.client.methods.HttpGet
import org.apache.http.impl.client.HttpClientBuilder
import org.apache.http.util.EntityUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SaveMode}

/**
 * @Created by Orion
 * @Description Backfills province/city/region information onto session-split app logs,
 *              resolving coordinates via a local geohash dictionary first and falling
 *              back to the AMap reverse-geocoding API on a cache miss.
 */
object RollBackRegion {

  /**
   * Entry point. Reads the session-split log table for one partition date, attaches
   * (province, city, region) to every row, and writes the result to a Hive temp table.
   *
   * @param args args(0) (optional) — the partition date to process, e.g. "2023-02-16".
   *             Falls back to "2023-02-16" when absent.
   */
  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME", "root")
    val session = SparkUtils.getSesion()

    // 1) Load the geohash dictionary from MySQL ---------------------------------------------------
    val prop = new Properties()
    // NOTE(review): DB credentials and JDBC URLs are hard-coded here and below — move to config.
    prop.setProperty("user", "root")
    prop.setProperty("password", "root")
    val dictDF = session.read.jdbc("jdbc:mysql://localhost:3306/cool?useUnicode=true&characterEncoding=utf8", "tb_area_dict", prop)
    // Parse dictionary rows into (geohash, (province, city, region)) pairs.
    val rdd1: RDD[(String, (String, String, String))] = dictDF.rdd.map(row => {
      val geoStr = row.getAs[String]("geostr")
      val province = row.getAs[String]("province")
      val city = row.getAs[String]("city")
      val region = row.getAs[String]("region")
      (geoStr, (province, city, region))
    })

    // 2) Broadcast the dictionary so every executor holds one local read-only copy ----------------
    val mp: Map[String, (String, String, String)] = rdd1.collect().toMap
    val bc = session.sparkContext.broadcast(mp)

    // 3) Load the session-split data --------------------------------------------------------------
    // BUG FIX: the original predicate was "dt=' 2023-02-16'" — the stray space inside the
    // date literal matched no partition. The date is now taken from args(0) (as the original
    // comment intended), falling back to the corrected literal.
    val targetDt = if (args.nonEmpty) args(0) else "2023-02-16"
    val data = session.read.table("tmp.cool_app_session_split").where(s"dt='$targetDt'")
    val rdd2: RDD[Row] = data.rdd

    val rdd3 = rdd2.mapPartitions(iters => {
      // One JDBC connection and ONE HTTP client per partition. (The original built a new
      // HTTP client for every cache-miss row and never closed the JDBC connection —
      // iters.map is lazy, so a trailing close would have run before any row was processed.)
      val conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/cool?useUnicode=true&characterEncoding=utf8", "root", "root")
      val ps = conn.prepareStatement("insert  into  tb_area_dict values (? ,? ,? ,?,?,?,?)")
      val httpClient = HttpClientBuilder.create().build()
      try {
        // Materialize the partition so the resources above can be safely closed before returning.
        val out = iters.flatMap(row => {
          // Parse one log row ---------------------------------------------------------------------
          val account = row.getAs[String]("account")
          val appid = row.getAs[String]("appid")
          val appversion = row.getAs[String]("appversion")
          val carrier = row.getAs[String]("carrier")
          val deviceid = row.getAs[String]("deviceid")
          val devicetype = row.getAs[String]("devicetype")
          val eventid = row.getAs[String]("eventid")
          val ip = row.getAs[String]("ip")
          val latitude = row.getAs[Double]("latitude")
          val longitude = row.getAs[Double]("longitude")
          val nettype = row.getAs[String]("nettype")
          val osname = row.getAs[String]("osname")
          val osversion = row.getAs[String]("osversion")
          val properties = row.getAs[Map[String, String]]("properties")
          val releasechannel = row.getAs[String]("releasechannel")
          val resolution = row.getAs[String]("resolution")
          val sessionid = row.getAs[String]("sessionid")
          val timestamp = row.getAs[Long]("timestamp")
          val new_session = row.getAs[String]("new_session")
          val dt = row.getAs[String]("dt")

          // Resolve the row's coordinates to a geohash key and try the local dictionary first.
          val key = GeoHash.geoHashStringWithCharacterPrecision(latitude, longitude, 6)
          bc.value.get(key) match {
            case Some((province, city, region)) =>
              // 1) Dictionary hit — no network round trip needed.
              Some(LogBean2("", account, appid, appversion, carrier, deviceid, devicetype, eventid, ip, latitude, longitude, nettype, osname, osversion, properties, releasechannel, resolution, sessionid, timestamp, dt, new_session, province, city, region))

            case None =>
              // 2) Dictionary miss — call the AMap reverse-geocoding API.
              // BUG FIX: "extensions=bash" is not a valid value for the regeo API; it expects "base".
              // NOTE(review): the API key is hard-coded — move it to configuration.
              val get = new HttpGet(s"https://restapi.amap.com/v3/geocode/regeo?output=json&location=$longitude,$latitude&key=daf1fca9fac7159400d99faf6ef74c74&radius=1000&extensions=base")
              val response = httpClient.execute(get)
              try {
                val entity = response.getEntity
                if (entity != null) {
                  val str = EntityUtils.toString(entity)
                  val nObject1 = JSON.parseObject(str)
                  val address = nObject1.getJSONObject("regeocode").getJSONObject("addressComponent")
                  val province = address.getString("province")
                  val city = address.getString("city")
                  val district = address.getString("district")
                  val township = address.getString("township")
                  // 3) Persist the resolved location so the next run finds it in the dictionary.
                  ps.setString(1, key)
                  ps.setString(2, province)
                  ps.setString(3, city)
                  ps.setString(4, district)
                  ps.setString(5, township)
                  ps.setDouble(6, latitude)
                  ps.setDouble(7, longitude)
                  ps.execute()
                  Some(LogBean2("", account, appid, appversion, carrier, deviceid, devicetype, eventid, ip, latitude, longitude, nettype, osname, osversion, properties, releasechannel, resolution, sessionid, timestamp, dt, new_session, province, city, district))
                } else {
                  // No response body: drop the row instead of emitting null
                  // (the original returned null here, which breaks toDF()).
                  None
                }
              } finally {
                response.close()
              }
          }
        }).toList
        out.iterator
      } finally {
        ps.close()
        httpClient.close()
        conn.close()
      }
    })

    // 4) Write the backfilled rows to the Hive temp table -----------------------------------------
    import session.implicits._
    val resDF = rdd3.toDF()
    resDF.printSchema()

    resDF.write.format("orc").mode(SaveMode.Overwrite).partitionBy("dt").saveAsTable("tmp.cool_app_with_region")

    session.close()
  }

}
