package com.study.spark.ml.movie.basic

import com.study.spark.ml.movie.Util
import org.apache.spark.sql.Row

/**
  * 电影信息
  *
  * @author: stephen.shen
  * @create: 2019-04-10 9:06
  */
object MovieData {

  /**
    * Entry point: prints each (year, count) pair on its own line,
    * ordered ascending by year.
    */
  def main(args: Array[String]): Unit = {
    // Iterate directly instead of indexing into the array; also call the
    // side-effecting method with explicit parentheses per convention.
    getMovieYearsCountSorted().foreach(println)
  }

  /**
    * Counts how many movies were released in each year.
    *
    * NOTE(review): this stops the shared SparkContext before returning, so it
    * can only be invoked once per JVM session — consider moving the stop()
    * into the caller.
    *
    * @return array of (year, count) tuples sorted ascending by year
    */
  def getMovieYearsCountSorted(): Array[(String, Long)] = {
    val df = Util.getMovieDataDF()

    df.printSchema()
    // Register the UDF that extracts the year from a date string.
    Util.spark.udf.register("convertYear", Util.convertYear _)
    // Select all dates and extract the year via the registered UDF.
    // (Assumes Util.getMovieDataDF registered the "movie_data" temp view —
    // not visible here; confirm in Util.)
    val yearsDF = Util.spark.sql("SELECT convertYear(date) AS year FROM movie_data")
    // asc/desc column functions come from org.apache.spark.sql.functions.
    import org.apache.spark.sql.functions._
    // Group by year, count occurrences, and sort ascending by year.
    val resultDF = yearsDF.groupBy("year").count().orderBy(asc("year"))
    // Convert the DataFrame to an RDD and pack each row into a (year, count)
    // tuple; "count" is a Spark-generated Long column.
    val result = resultDF.rdd
      .map(row => (row.getAs[String]("year"), row.getAs[Long]("count")))
      .collect()
    Util.spark.sparkContext.stop()
    result
  }
}
