import org.apache.spark.{SparkConf, SparkContext}

object MovieJoin {
  /**
   * Joins MovieLens movies with their average ratings and writes out every
   * movie whose mean rating exceeds 4.0 as (movieId, avgRating, title).
   *
   * Reads ml-latest-small/movies.csv and ratings.csv, writes text output to
   * ml-latest-small/result.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("MovieLens").setMaster("local")
    conf.set("spark.testing.memory", "2147480000")
    val sc = new SparkContext(conf)

    // movies.csv layout: movieId,title,genres — skip the header row, which
    // would otherwise crash on "movieId".toInt.
    // NOTE(review): split(",") mis-parses titles that themselves contain
    // commas (e.g. "American President, The (1995)") — a proper CSV parser
    // would be needed for full correctness; kept split-based here to avoid
    // introducing a new dependency.
    val movies = sc.textFile("src/movielens/ml-latest-small/movies.csv")
    val movieTitles = movies
      .filter(line => !line.startsWith("movieId"))
      .map { line =>
        val cols = line.split(",")
        (cols(0).toInt, cols(1)) // (movieId, title)
      }

    // ratings.csv layout: userId,movieId,rating,timestamp — skip header too.
    val ratings = sc.textFile("src/movielens/ml-latest-small/ratings.csv")
    val movieRatings = ratings
      .filter(line => !line.startsWith("userId"))
      .map { line =>
        val cols = line.split(",")
        (cols(1).toInt, cols(2).toDouble) // (movieId, rating)
      }

    // Mean rating per movie. aggregateByKey folds (sum, count) map-side in a
    // single pass, avoiding groupByKey's shuffle of every individual rating.
    val movieScores = movieRatings
      .aggregateByKey((0.0, 0L))(
        (acc, rating) => (acc._1 + rating, acc._2 + 1L),
        (a, b) => (a._1 + b._1, a._2 + b._2)
      )
      .mapValues { case (total, count) => total / count }

    // Both RDDs are already keyed by movieId, so join directly — the original
    // keyBy(_._1) calls only nested the tuples and obscured the accessors.
    val result = movieScores
      .join(movieTitles)
      .filter { case (_, (avg, _)) => avg > 4.0 }
      .map { case (movieId, (avg, title)) => (movieId, avg, title) }

    result.saveAsTextFile("src/movielens/ml-latest-small/result")
  }
}
