package com.shujia.dim

import com.shujia.common.utils.SparkTool
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Builds the scenic-spot → grid mapping dimension table.
 *
 * Pairs every geo grid cell with every scenic spot, keeps the pairs whose grid
 * centre point lies inside the scenic spot's boundary, and writes one row per
 * scenic spot with a comma-separated list of the grid ids it contains.
 */
object DimScenicGrid extends SparkTool {
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Grid configuration: one row per grid cell, including its centre coordinates.
    val grids: DataFrame = spark.table("dim.dim_geotag_grid")
    // Scenic-spot boundaries: one row per scenic spot with its boundary polygon.
    val boundaries: DataFrame = spark.table("dim.dim_scenic_boundary")

    // Cartesian-pair every grid with every scenic spot, then keep only pairs
    // whose grid centre falls inside the boundary.
    // NOTE(review): isInScenicBoundary is defined outside this file (presumably
    // a UDF provided by SparkTool) — confirm its containment semantics there.
    val gridsInScenic: DataFrame = grids
      .crossJoin(boundaries)
      .filter(isInScenicBoundary(col("center_longi"), col("center_lati"), col("boundary")))
      .select(col("grid_id"), col("scenic_id"), col("scenic_name"))

    // Per scenic spot, collect the distinct grid ids and join them into a
    // single comma-separated string column.
    val scenicGrids: DataFrame = gridsInScenic
      .groupBy(col("scenic_id"), col("scenic_name"))
      .agg(concat_ws(",", collect_set(col("grid_id"))).alias("grids"))

    // Persist as tab-separated text, replacing any previous output.
    scenicGrids
      .write
      .format("csv")
      .option("sep", "\t")
      .mode(SaveMode.Overwrite)
      .save("/daas/motl/dim/dim_scenic_grid")
  }
}
