package com.shujia.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.from_unixtime
import org.apache.spark.sql.{DataFrame, SparkSession}

object Code12DianXin {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("dsl")
      .config("spark.sql.shuffle.partitions", "3")
      .getOrCreate()

    // Telecom signalling data: one row per stay of a subscriber (mdn) in a
    // grid cell. duration is the stay length in minutes; start_time/end_time
    // are timestamps formatted like 20180503173254 (yyyyMMddHHmmss).
    val dxDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",")
      .schema("mdn String, grid_id String, city_id String, county_id String, duration int, start_time String, end_time String, pt String")
      .load("spark_code/data/dianxin.csv")

    import org.apache.spark.sql.functions._
    import spark.implicits._

    // 1. Per city, count visitors whose TOTAL stay is at least two hours
    //    (>= 120 minutes): sum duration per (user, city) first, then each
    //    surviving row is one distinct user, so count(*) counts users.
    dxDF
      .groupBy($"mdn", $"city_id")
      .agg(sum("duration") as "city_duration")
      .where($"city_duration" >= 120)
      .groupBy($"city_id")
      .agg(count("*") as "cnt")
      .show()


    // 2. Top three counties by total stay time within each city.
    dxDF
      .groupBy($"city_id", $"county_id")
      .agg(sum("duration") as "sum_duration")
      .withColumn("pm", row_number() over (Window.partitionBy($"city_id").orderBy($"sum_duration".desc)))
      .where($"pm" <= 3)
      .show()


    // 4. Gap between consecutive records of the same user within a county:
    //    partition by (county, user), order by start_time, then subtract the
    //    current row's end_time from the NEXT row's start_time (via lead).
    dxDF
      .withColumn("next_startTime", lead($"start_time", 1) over Window.partitionBy($"county_id", $"mdn").orderBy($"start_time"))
      // Pattern must use HH (0-23): values such as 20180503173254 carry hour 17,
      // which the 12-hour pattern "hh" cannot parse — unix_timestamp would
      // return null for every afternoon record and silently drop those gaps.
      .withColumn("sjc", unix_timestamp($"next_startTime", "yyyyMMddHHmmss") - unix_timestamp($"end_time", "yyyyMMddHHmmss"))
      .show()

    // 5. Per grid cell: rank stays by duration (desc) and attach the
    //    longest, shortest, and average stay time of that grid.
    dxDF
      .withColumn("pm", row_number() over Window.partitionBy($"grid_id").orderBy($"duration".desc))
      .withColumn("max_duration", max("duration") over Window.partitionBy($"grid_id"))
      .withColumn("min_duration", min("duration") over Window.partitionBy($"grid_id"))
      .withColumn("avg_duration", avg("duration") over Window.partitionBy($"grid_id"))
      .show()

    // Release the local SparkContext before the JVM exits.
    spark.stop()
  }
}
