package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

object Test08_score {
  /** Local Spark job: joins MovieLens-style "::"-delimited movie and rating
    * files, computes the average rating per movie title, and writes the
    * result sorted best-first as headered CSV to input/movieScore.
    */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val spark = SparkSession.builder()
      .appName("score")
      .master("local[*]")
      .getOrCreate()
    try {
      import spark.implicits._
      val sc = spark.sparkContext
      // Movie file, one record per line: movieId::title::genres
      val dfMovie = sc.textFile("input/movies.dat")
        .map(_.split("::"))
        .map(f => (f(0).toInt, f(1), f(2)))
        .toDF("movieId", "title", "genres")
      // Ratings file, one record per line: userId::movieId::rating::timestamp
      // rating is parsed as Double so half-star values (e.g. "3.5") also work;
      // integer ratings parse unchanged and avg() already yields a double,
      // so the written output is identical for integer-only data.
      val dfRatings = sc.textFile("input/ratings.dat")
        .map(_.split("::"))
        .map(f => (f(0).toInt, f(1).toInt, f(2).toDouble, f(3).toLong))
        .toDF("userId", "movieId", "rating", "timestamp")
      // Join the two datasets on movieId, average ratings per title,
      // and sort by the generated "avg(rating)" column, highest first.
      dfMovie.join(dfRatings, "movieId")
        .groupBy("title")
        .avg("rating")
        .sort($"avg(rating)".desc)
        .repartition(3)
        .write
        .option("header", "true")
        // NOTE(review): output is written under input/ — confirm this path is
        // intended; the job fails if the directory already exists (default SaveMode).
        .csv("input/movieScore")
    } finally {
      // Release local Spark resources even if the job throws.
      spark.stop()
    }
  }
}
