package com.shujia.tour

import java.awt.geom.Point2D

import com.shujia.common.grid.Grid
import com.shujia.common.poly.Polygon
import com.shujia.common.util.SparkTool
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql._

import scala.collection.mutable.ListBuffer

object ScenicTouristJob extends SparkTool {

  /**
   * Daily scenic-area tourist job.
   *
   * Joins the day's stay points against scenic-area boundaries to find which
   * users visited which scenic area, computes per-user stay time and arrival
   * time, filters out scenic-area staff (users whose residential grid lies
   * inside the area), attaches the user's source city, and writes the result
   * as a tab-separated CSV partition.
   *
   * NOTE(review): `day_id` and `month_id` are presumably supplied by the
   * `SparkTool` base — confirm they are set before `run` is invoked.
   */
  override def run(spark: SparkSession): Unit = {

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Stay-point fact table, restricted to the current day partition.
    val stayPoint: Dataset[Row] = spark
      .table("dwi.dwi_staypoint_msk_d")
      .where($"day_id" === day_id)

    // Scenic-area boundary dimension table.
    val boundary: DataFrame = spark.table("dim.dim_scenic_boundary")

    // User-profile (tag) table for the current month partition.
    val usertag: Dataset[Row] = spark.table("dim.dim_usertag_msk_m")
      .where($"month_id" === month_id)

    // Administrative-region configuration table (county -> city mapping).
    val adminCode: Dataset[Row] = spark.table("dim.dim_admincode")

    // Pull all scenic boundaries to the driver:
    // (scenic_id, scenic_name, boundary string understood by Polygon).
    val boundaryMap: Array[(String, String, String)] = boundary.map(row => {
      val scenicId: String = row.getAs[String]("scenic_id")
      val scenicName: String = row.getAs[String]("scenic_name")
      val bound: String = row.getAs[String]("boundary")
      (scenicId, scenicName, bound)
    }).collect()

    // Broadcast the boundaries so every executor holds one read-only copy.
    val broadBound: Broadcast[Array[(String, String, String)]] =
      spark.sparkContext.broadcast(boundaryMap)

    // For every stay point, emit one record per scenic area containing it.
    // A user visiting one scenic area can still contribute multiple rows
    // (one per stay point), which the later groupBy aggregates.
    //
    // mapPartitions is used so each Polygon is parsed ONCE per partition;
    // the original flatMap rebuilt every Polygon for every single row
    // (rows x boundaries constructions).
    val scenicPoints: DataFrame = stayPoint.mapPartitions(rows => {

      // Pre-parse each boundary into a Polygon, kept alongside its raw string
      // because the boundary string is carried through to the output rows.
      val polygons: Array[(String, String, String, Polygon)] =
        broadBound.value.map { case (scenicId, scenicName, bound) =>
          (scenicId, scenicName, bound, new Polygon(bound))
        }

      rows.flatMap(row => {
        val mdn: String = row.getAs[String]("mdn")
        val longi: Double = row.getAs[String]("longi").toDouble
        val lati: Double = row.getAs[String]("lati").toDouble
        val duration: Double = row.getAs[String]("duration").toDouble
        val gridFirstTime: String = row.getAs[String]("grid_first_time")

        // Keep only the scenic areas whose polygon contains this stay point.
        polygons.iterator.collect {
          case (scenicId, scenicName, bound, poly) if poly.contains(longi, lati) =>
            (mdn, scenicId, scenicName, duration, gridFirstTime, bound)
        }
      })
    }).toDF("mdn", "scenic_id", "scenic_name", "duration", "grid_first_time", "boundary")

    // Per (user, scenic area): total stay time in minutes (duration is assumed
    // to be in seconds — TODO confirm against the staypoint table) and the
    // earliest grid entry time as the arrival time.
    val scenicTour: DataFrame = scenicPoints
      .groupBy($"mdn", $"scenic_id", $"scenic_name", $"boundary")
      .agg(sum($"duration") / 60 as "d_stay_time", min($"grid_first_time") as "d_arrive_time")
      .select($"mdn", $"scenic_id", $"scenic_name", $"boundary", $"d_stay_time", $"d_arrive_time")

    // True when the center of the user's residential grid lies OUTSIDE the
    // scenic boundary — i.e. the user does not live inside the area and is a
    // genuine tourist rather than on-site staff.
    //
    // Renamed from the original "polygonContains", which misleadingly returned
    // the NEGATION of containment.
    spark.udf.register("residenceOutsideScenic", (resiGridId: String, bound: String) => {
      val center: Point2D.Double = Grid.getCenter(resiGridId.toLong)
      val polygon: Polygon = new Polygon(bound)
      !polygon.contains(center.getX, center.getY)
    })

    // Drop scenic-area staff, then map the user's residential county to its
    // city to obtain the tourist's source city.
    val resultDF: DataFrame = scenicTour
      .join(usertag, "mdn")
      .where(expr("residenceOutsideScenic(resi_grid_id,boundary)"))
      .select($"mdn", $"scenic_id", $"scenic_name", $"resi_county_id", $"d_stay_time", $"d_arrive_time")
      // adminCode is a small dimension table — broadcast it to avoid a shuffle.
      .join(adminCode.hint("broadcast"), $"resi_county_id" === $"county_id")
      .select($"mdn", $"city_id" as "source_city_id", $"scenic_id", $"scenic_name", $"d_arrive_time", $"d_stay_time")

    // Overwrite today's output partition as tab-separated CSV.
    resultDF.write
      .format("csv")
      .option("sep", "\t")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dal_tour/dal_tour_scenic_tourist_msk_d/day_id=$day_id")

  }
}
