package com.shujia.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo11Year {

  /**
    * Reads viewing-record JSON data and prints two ranked aggregations:
    *   1. number of viewings per year (taken from `recordMap.year`)
    *   2. number of viewings per tag (`useMap.tags`, pipe-separated list)
    */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("year")
      // Single shuffle partition is sufficient for this small local demo.
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // NOTE(review): the file has a .txt extension but is read as JSON records.
    val data: DataFrame = spark
      .read
      .format("json")
      .load("data/精准搜索.txt")

    // Cache: the DataFrame feeds two independent aggregations below.
    data.cache()

    data.printSchema()
    data.show()

    /**
      * 1. Count viewings per year, year taken from recordMap.year.
      */
    data
      // extract the year column
      .select($"recordMap.year" as "year")
      // drop null / empty years
      .where($"year".isNotNull && $"year" =!= "")
      // count viewings per year
      .groupBy($"year")
      .agg(count($"year") as "num")
      // add a ranking column
      // NOTE(review): Window.orderBy without partitionBy pulls all rows into
      // a single partition — acceptable here since shuffle.partitions = 1.
      .withColumn("r", row_number() over Window.orderBy($"num".desc))
      .show(1000)

    /**
      * 2. Count viewings per tag; a record may carry multiple tags.
      */
    data
      // split the pipe-separated tags and explode to one tag per row
      // NOTE(review): this reads useMap.tags while the first query reads
      // recordMap.year — confirm both struct prefixes against the schema.
      .select(explode(split($"useMap.tags", "\\|")) as "tag")
      // drop null / empty tags
      .where($"tag".isNotNull && $"tag" =!= "")
      .groupBy($"tag")
      .agg(count($"tag") as "num")
      .withColumn("r", row_number() over Window.orderBy($"num".desc))
      .show()

    // Release the cached data and shut the session down cleanly
    // (the original leaked the SparkSession at JVM exit).
    data.unpersist()
    spark.stop()
  }

}
