package com._51doit.spark06

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object MovieTest01 {

  /**
   * Reads MovieLens-style ratings.dat (line format: userId::movieId::rating::timestamp),
   * computes the average rating per movie, and prints the ten highest-rated movies
   * as (movieId, averageRating) pairs.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)

    val sc: SparkContext = new SparkContext(conf)

    // NOTE(review): hard-coded Windows path — consider passing it via args(0).
    val rateDat: RDD[String] = sc.textFile("D:\\07spark\\spark-day06\\作业题\\moviedata\\ratings.dat")

    // Extract (movieId, rating) from each line.
    val movieRatings: RDD[(String, Int)] = rateDat.map { line =>
      val fields: Array[String] = line.split("::")
      val mid: String = fields(1)
      val rating: Int = fields(2).toInt
      (mid, rating)
    }

    // Accumulate (sum, count) per movie with aggregateByKey.
    // The original built an Array of all ratings per key (mapValues(Array(_)) then
    // reduceByKey(_ ++ _)), which shuffles every individual rating and allocates
    // an intermediate array per record; combining (sum, count) pairs map-side is
    // the idiomatic equivalent.
    val sumCount: RDD[(String, (Long, Long))] = movieRatings.aggregateByKey((0L, 0L))(
      (acc, rating) => (acc._1 + rating, acc._2 + 1L),   // fold a rating into the partition-local accumulator
      (a, b) => (a._1 + b._1, a._2 + b._2)               // merge accumulators across partitions
    )

    // Average as Double: the original `ra.sum / ra.length` was integer division
    // and silently truncated averages (e.g. 4.7 became 4).
    val avgRating: RDD[(String, Double)] = sumCount.mapValues {
      case (sum, count) => sum.toDouble / count
    }

    // Top 10 movies by average rating, highest first.
    val top10: Array[(String, Double)] = avgRating.sortBy(-_._2).take(10)

    // Print the final result (the original printed the intermediate per-movie
    // rating arrays and left the computed top-10 unused).
    top10.foreach(println)

    sc.stop()
  }

}
