package com.itcj.dmp.area

import com.itcj.dmp.utils.{HttpUtils, KuduHelper}
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, SparkSession}

object BussinessAreaRuner {

  // ODS table name is partitioned by day, e.g. "ODS_20200101".
  val odsTablename: String = "ODS_" + KuduHelper.today()
  val areaTablename: String = "ODS_Area"

  /**
   * Resolves business-area information for geohash cells found in today's ODS
   * table and persists the result to the Kudu table `ODS_Area`.
   *
   * Steps:
   *   1. build a SparkSession;
   *   2. read the ODS table and the (possibly absent) Area table;
   *   3. if the Area table does not exist, convert every geohash; otherwise
   *      only convert geohashes that have no resolved area yet;
   *   4. create the Area table if needed and save the resolved rows to Kudu.
   *
   * If the ODS table is missing there is nothing to do and the job ends.
   */
  def process(): Unit = {
    // 1. Create the SparkSession (loadConfig comes from SparkConfigHelper's implicits).
    import com.itcj.dmp.utils.SparkConfigHelper._
    val sparkSession: SparkSession = SparkSession.builder()
      .master("local[2]")
      .appName("Ip2Region")
      .loadConfig()
      .getOrCreate()
    sparkSession.sparkContext.setLogLevel("WARN")

    // 2. Read the ODS and Area tables; readKuduTable yields Option[DataFrame].
    import com.itcj.dmp.utils.KuduHelper._
    val odsmf = sparkSession.readKuduTable(odsTablename)
    val areamf = sparkSession.readKuduTable(areaTablename)

    odsmf match {
      // 3.1 Without the ODS table there is nothing to process — end the job.
      case None =>
        println("ODS表不存在,任务结束")

      case Some(odsdf) =>
        // Register the (longitude, latitude) -> area UDF so it is usable from SQL.
        import com.itcj.dmp.utils.HttpUtils._
        sparkSession.udf.register("jsonProcess", jsonProcess _)

        import org.apache.spark.sql.functions._

        val resultArea: DataFrame = areamf match {
          // 3.2 No Area table yet: convert every geohash in the ODS table.
          case None =>
            val converted = odsdf.select("geohash", "longitude", "latitude")
              .groupBy("geohash")
              .agg(round(avg("longitude"), 5) as "longitude", round(avg("latitude"), 5) as "latitude")
              .selectExpr("geohash", "jsonProcess(longitude,latitude) as area")
            // Drop rows where the remote lookup failed (UDF returned null).
            converted.where(converted.col("area").isNotNull)

          // 3.3 Area table exists: left-join ODS against it and only convert
          //     geohashes whose area is still unresolved.
          case Some(areadf) =>
            val newodfdf = odsdf.select("geohash", "longitude", "latitude")
            newodfdf.createOrReplaceTempView("newodfdf")
            areadf.createOrReplaceTempView("areadf")

            // Keep the ODS-side geohash (ngeohash): for unmatched rows every
            // area-side column is NULL, so grouping must use ngeohash.
            // (Grouping by the area-side geohash — as the original did —
            // collapses all new geohashes into a single NULL-keyed row.)
            val unresolved = sparkSession.sql(
              """
                |select
                |newodfdf.geohash as ngeohash,
                |longitude,
                |latitude
                |from newodfdf LEFT join areadf as a
                |on newodfdf.geohash = a.geohash
                |where a.area is null
              """.stripMargin)
            unresolved.createOrReplaceTempView("unresolved")

            // Average the coordinates per geohash, rounded to 5 decimals.
            val averaged = sparkSession.sql(
              """
                |select ngeohash,
                |round(avg(longitude),5) as longitude,
                |round(avg(latitude),5) as latitude
                |from unresolved
                |group by ngeohash
              """.stripMargin)
            averaged.createOrReplaceTempView("averaged")

            val converted = sparkSession.sql(
              """
                |select ngeohash as geohash,jsonProcess(longitude,latitude) as area
                |from averaged
              """.stripMargin)
            // Same null-filter as the no-Area branch, for consistency.
            converted.where(converted.col("area").isNotNull)
        }

        // 4. Persist. The Kudu key column must be non-nullable, so build a
        //    fresh schema instead of mutating the array behind df.schema
        //    (mutating that array relies on Spark exposing internal state).
        val kuduSchema = StructType(resultArea.schema.fields.map { f =>
          if (f.name == "geohash") StructField("geohash", StringType, nullable = false) else f
        })
        sparkSession.createKuduTable(areaTablename, kuduSchema, List("geohash"), false)
        resultArea.saveKuduTable(areaTablename)
    }
  }

  /** Entry point: runs the whole area-resolution job. */
  def main(args: Array[String]): Unit = {
    process()
  }
}
