package com.shujia.tour

import com.shujia.spark.SparkTool
import com.shujia.util.{Geography, SSXRelation}
import org.apache.spark.sql.hive.HiveContext
import com.shujia.bean.CaseClass.Province
import org.apache.spark.sql.SaveMode

object TourProvinceApp extends SparkTool {
  /**
    * Business logic: identify provincial tourists for one day of stay-point
    * data and write one `Province` record per (subscriber, province) pair
    * to a day-partitioned parquet path.
    */
  override def run(): Unit = {
    val hiveContext = new HiveContext(sc)
    import hiveContext.implicits._

    // Day-partitioned output location for the province-level tourist table.
    val provincePath = Constants.PROVINCE_PATH + Constants.PARTITION_NAME + day_id

    // Habitual-residence data: (mdn, "resiGridId<SPLIT>resiCountyId") per subscriber.
    val residenceRDD = hiveContext.sql(
      """
        |select mdn,resi_grid_id,resi_county_id from
        |dim.dim_usertag_msk_m
        |where month_id=201805
        |
      """.stripMargin)
      .map { row =>
        val mdn = row.getAs[String]("mdn")
        val gridId = row.getAs[String]("resi_grid_id")
        val countyId = row.getAs[String]("resi_county_id")
        (mdn, gridId + Constants.SPLIT + countyId)
      }

    // Stay points for the target day: (mdn, "gridId<SPLIT>duration<SPLIT>provinceId").
    val stayPointRDD = hiveContext.sql(
      s"""
         |select mdn,grid_id,county_id,duration from
         |dwi.dwi_staypoint_msk_d
         |where day_id=$day_id
         |
      """.stripMargin)
      .map { row =>
        val mdn = row.getAs[String]("mdn")
        val gridId = row.getAs[String]("grid_id")
        val countyId = row.getAs[String]("county_id")
        val duration = row.getAs[String]("duration")

        // Look up the province of the stay-point's county.
        // NOTE(review): if COUNTY_PROVINCE is a scala.collection.Map, `.get`
        // returns an Option and the interpolated value reads "Some(id)" —
        // confirm the declared type of SSXRelation.COUNTY_PROVINCE.
        val provinceId = SSXRelation.COUNTY_PROVINCE.get(countyId)

        (mdn, s"$gridId${Constants.SPLIT}$duration${Constants.SPLIT}$provinceId")
      }

    stayPointRDD
      .join(residenceRDD)
      .map { case (mdn, (stayInfo, resiInfo)) =>
        val stay = stayInfo.split(Constants.SPLIT)
        val resi = resiInfo.split(Constants.SPLIT)

        // Key by (mdn, province) so one group holds all of one person's
        // stay points inside one province; value packs the fields needed later.
        val key = mdn + Constants.SPLIT + stay(2)
        val value = s"${stay(1)}${Constants.SPLIT}${resi(0)}${Constants.SPLIT}${resi(1)}${Constants.SPLIT}${stay(0)}"
        (key, value)
      }
      .groupByKey()
      .map { case (key, values) =>
        val keyParts = key.split(Constants.SPLIT)
        val mdn = keyParts(0)
        val pId = keyParts(1)

        // All of this person's stay points within this province,
        // each split into [duration, resiGridId, resiCountyId, gridId].
        val records = values.toList.map(_.split(Constants.SPLIT))

        // Travel distance: farthest stay point from the home grid, in metres.
        val maxDistance = records.map { f =>
          Geography.calculateLength(f(1).toLong, f(3).toLong)
        }.max

        // Total stay time in the province.
        // NOTE(review): sum/60.0 converts seconds->minutes or minutes->hours
        // depending on the unit of `duration`; the tourist rule below compares
        // against 3 "hours" — confirm the source column's unit.
        val stayTime = records.map(_(0).toLong).sum / 60.0

        // Origin county comes from the residence fields (same for every record).
        val resiCountyId = records.head(2)

        // Round both measures to two decimals, as the original output did.
        Province(
          mdn,
          resiCountyId,
          pId,
          stayTime.formatted("%.2f").toDouble,
          maxDistance.formatted("%.2f").toDouble)
      }
      .filter { p =>
        /**
          * Tourist definition:
          *  - travelled more than 10 km from the habitual residence, and
          *  - residence is known (guaranteed by the join above), and
          *  - stayed in the province for more than 3 hours.
          */
        p.d_stay_time > 3 && p.d_max_distance > 10 * 1000
      }
      .toDF()
      .write
      .mode(SaveMode.Overwrite)
      .parquet(provincePath)
  }

  /**
    * Initialize Spark runtime parameters (none required for this job).
    */
  override def init(): Unit = {

  }
}
