package com.shujia.spark.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Spark SQL exercises on telecom location data (dianxin.csv).
 * Each input row is one stay of a phone number (mdn) inside a grid cell,
 * with city/county ids and start/end timestamps in yyyyMMddHHmmss format.
 */
object Demo05DianXin {
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName(this.getClass.getSimpleName.replace("$", ""))
      // Number of partitions used for shuffle operations in Spark SQL.
      // Default is 200 (i.e. 200 tasks); keep it small for a local run.
      .config("spark.sql.shuffle.partitions", "2")
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    val dxDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",")
      .schema("mdn String,grid_id String,city_id String,county_id String,duration String,start_time String,end_time String,pt String")
      .load("spark/data/dianxin.csv")

    // 1. Per city: number of visitors whose total stay time exceeds two hours.
    dxDF
      .select(
        $"mdn"
        , $"city_id"
        // Stay length in seconds, derived from the two timestamp columns.
        , (unix_timestamp($"end_time", "yyyyMMddHHmmss") - unix_timestamp($"start_time", "yyyyMMddHHmmss")) as "duration"
      ).groupBy($"mdn", $"city_id")
      .agg(round(sum($"duration") / 3600, 4) as "sum_duration") // total hours per user per city
      // "exceeds two hours" is a strict comparison; `>= 2` would also count exactly 2h
      .where($"sum_duration" > 2)
      .groupBy($"city_id")
      .agg(count("*") as "cnt")
    //      .show()

    // 2. Per city: top three counties by accumulated stay time.
    dxDF
      // "\N" markers survive as literal strings when read into a DataFrame
      // (they would become null only when loaded into a Hive table), so filter them out.
      .where($"city_id" =!= "\\N" and $"county_id" =!= "\\N")
      .groupBy($"city_id", $"county_id")
      .agg(sum($"duration") as "sum_duration")
      .withColumn("rn", row_number() over Window.partitionBy($"city_id").orderBy($"sum_duration".desc))
      .where($"rn" <= 3)
    //      .show(1000)


    // 3. Per city: top three counties by number of distinct visitors.
    dxDF
      // "\N" markers survive as literal strings when read into a DataFrame; filter them out.
      .where($"city_id" =!= "\\N" and $"county_id" =!= "\\N")
      .groupBy($"city_id", $"county_id")
      .agg(countDistinct($"mdn") as "cnt")
      .withColumn("rn", row_number() over Window.partitionBy($"city_id").orderBy($"cnt".desc))
      .where($"rn" <= 3)
    //      .show(1000)

    // 4. Per user: time gap between two adjacent location records within the same county.
    dxDF
      .where($"county_id" =!= "\\N")
      // End time of the previous record in the same (mdn, county) partition;
      // the default 0 cannot be parsed as a timestamp, producing null below.
      .withColumn("last_end_time", lag($"end_time", 1, 0) over Window.partitionBy($"mdn", $"county_id").orderBy($"start_time"))
      // nanvl cannot be used for null checks here; coalesce maps the null of the first row to 0.
      .withColumn("interval_time", coalesce(unix_timestamp($"start_time", "yyyyMMddHHmmss") - unix_timestamp($"last_end_time", "yyyyMMddHHmmss"), expr("0")))
    //      .show(100)

    // 5. Per grid cell: rank stays by duration in descending order,
    //    plus the longest, shortest and average stay time.
    dxDF
      // Bug fix: the comment asked for a descending rank but the order was ascending.
      .withColumn("rn", row_number() over Window.partitionBy($"grid_id").orderBy($"duration".desc))
      .withColumn("max_duration", max($"duration") over Window.partitionBy($"grid_id"))
      // Bug fix: min_duration and avg_duration both used max() before.
      .withColumn("min_duration", min($"duration") over Window.partitionBy($"grid_id"))
      .withColumn("avg_duration", avg($"duration") over Window.partitionBy($"grid_id"))
      .show()

  }

}
