package com.doit.doitdata.rule

import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.SparkSession
import redis.clients.jedis.Jedis

import scala.util.control.NonFatal

/**
  * Created by hunter.coder (Tao Ge)
  * 2019/4/8 14:48
  * Contact QQ: 657270652
  * Version: 1.0
  * More learning material: https://blog.csdn.net/coderblack/
  * Description: builds the province/city/district geo-location knowledge base.
  **/
object AreaRuleConstructor {

  /**
    * Entry point. Reads a raw area-dictionary dump, joins the four
    * administrative levels (province / city / district / county), and writes
    * each county's location to Redis DB 1 as
    * `geohash(5 chars) -> "province,city,district,county"`.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName(AreaRuleConstructor.getClass.getSimpleName)
      .getOrCreate()

    import spark.implicits._

    // Load the raw dictionary file (one SQL-insert-like record per line).
    val ds1 = spark.read.textFile("G:\\sharkdata\\areadict")

    // Parse each line: take the text after the first '(' and split on commas
    // to extract id, name, parent id, level and the lng/lat strings.
    val ds2 = ds1.map(line => {
      val fields = line.split("\\(")(1).split(",")

      val id = fields(0).trim
      val name_tmp = fields(1).trim
      val name = name_tmp.substring(1, name_tmp.length - 1) // strip surrounding quotes
      val pid = fields(2).trim
      val level = fields(4).trim
      val lngStr = fields(10).trim
      val latStr = fields(11).split("\\)")(0).trim // drop the trailing ')'
      (id, name, pid, level, lngStr, latStr)
    })

    // Cached because it is filtered four times below (once per level).
    ds2.cache()

    // Split by administrative level: 1=province, 2=city, 3=district, 4=county.
    val dfProvince = ds2.filter(_._4.equals("1")).toDF("id", "name", "pid", "level", "lngstr", "latstr")
    val dfCity = ds2.filter(_._4.equals("2")).toDF("id", "name", "pid", "level", "lngstr", "latstr")
    val dfDistrict = ds2.filter(_._4.equals("3")).toDF("id", "name", "pid", "level", "lngstr", "latstr")
    val dfCounty = ds2.filter(_._4.equals("4")).toDF("id", "name", "pid", "level", "lngstr", "latstr")

    // Register temp views so the hierarchy can be joined with SQL.
    dfProvince.createTempView("province")
    dfCity.createTempView("city")
    dfDistrict.createTempView("district")
    dfCounty.createTempView("county")

    // Join the four levels along the parent-id chain; the county row carries
    // the coordinates used for geohashing.
    val resDf = spark.sql(
      """
        |select
        |d.id,
        |p.name as province_name,
        |c.name as city_name,
        |d.name as district_name,
        |t.name as county_name,
        |t.lngstr,
        |t.latstr
        |from province p
        |join city c
        |on  p.id = c.pid
        |join district d
        |on d.pid = c.id
        |join county t
        |on t.pid = d.id
        |
      """.stripMargin)

    // Persist the joined records to Redis, one connection per partition.
    resDf.foreachPartition(part_iter => {

      val jedis = new Jedis("c701", 6379)
      try {
        jedis.select(1) // DB 1 holds the geo knowledge base

        part_iter.foreach(row => {
          try {
            // Coordinates arrive as strings; toDouble may throw on bad data.
            val lng = row.getAs[String]("lngstr").toDouble
            val lat = row.getAs[String]("latstr").toDouble

            val province = row.getAs[String]("province_name")
            val city = row.getAs[String]("city_name")
            val district = row.getAs[String]("district_name")
            val county = row.getAs[String]("county_name")

            // 5-character geohash (~±2.4 km cell) as the lookup key.
            val geohashcode = GeoHash.withCharacterPrecision(lat, lng, 5).toBase32

            jedis.set(geohashcode, Array(province, city, district, county).mkString(","))
          } catch {
            // Deliberate best-effort: skip rows with unparseable coordinates,
            // but let fatal errors (OOM, interrupts) propagate.
            case NonFatal(_) =>
          }
        })
      } finally {
        // Always release the connection, even if the partition loop fails.
        jedis.close()
      }
    })

    spark.close()
  }
}
