package org.example

import org.apache.spark.sql.SparkSession

import java.util.regex.Pattern

object data1_movies {
  /**
   * MovieLens-style dataset analysis over "::"-delimited .dat files:
   * entity counts, rating activity of user 3088, Comedy genre frequency,
   * per-year production totals, and the number of movies produced in 2000.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    val filePath = "E:\\hnl\\scala09\\Scala\\src\\main\\resources\\"
    val moviesRDD = sc.textFile(filePath + "movies.dat")
    val occupationsRDD = sc.textFile(filePath + "occupations.dat")
    val ratingsRDD = sc.textFile(filePath + "ratings.dat")
    val usersRDD = sc.textFile(filePath + "users.dat")
    println("电影总数:" + moviesRDD.count() + "\n" + "用户总数:" + usersRDD.count())
    println("评分总数:" + ratingsRDD.count() + "\n" + "职业数:" + occupationsRDD.count())

    // ratings.dat: userId::movieId::rating::timestamp -> (movieId, userId),
    // keeping only ratings made by user 3088.
    val num = ratingsRDD.map(_.split("::"))
      .map(user => (user(1), user(0)))
      .filter(_._2.equals("3088"))
    println("用户Id为3088的评分电影总数为：" + num.count())

    // movies.dat: movieId::title::genres -> (movieId, (title, genres))
    val moviesInfo = moviesRDD.map(_.split("::"))
      .map(movie => (movie(0), (movie(1), movie(2))))
    // BUGFIX: the third tuple element previously repeated item._2._1 (the
    // userId) instead of item._2._2._1 (the movie title), so the detail
    // record was (movieId, userId, userId, genres).
    val res = num.join(moviesInfo)
      .map(item => (item._1, item._2._1, item._2._2._1, item._2._2._2))
    //    println("电影详情是: \n")
    //    res.take(3).foreach(println)

    // Genre frequency: explode the pipe-delimited genre list, count per
    // genre, keep only the "Comedy" entry.
    val moviesStyle = moviesRDD.map(_.split("::"))
      .map(tp => (tp(0), tp(2)))
      .flatMapValues(sp => sp.split("\\|"))
      .map(p => (p._2, 1))
      .reduceByKey((x, y) => x + y)
      .filter(_._1.equals("Comedy"))
    //      moviesStyle.foreach(println)
    //    moviesStyle.take(5).foreach(println)

    // 统计每年度生产的电影总数 (count of movies produced per year).
    // BUGFIX: the original pattern "(.*)(\\d{4}\\))" captured "1995)" in
    // group(2); the subsequent substring(1, len - 1) then dropped the FIRST
    // digit and the ")", yielding 995. Capture the four digits inside the
    // parentheses directly instead, e.g. "Toy Story (1995)" -> 1995.
    val pattern = Pattern.compile("(.*)\\((\\d{4})\\)")
    val movieInfo = moviesRDD
      .map(_.split("::")(1)) // movie title, e.g. "Toy Story (1995)"
      .map(title => {
        val matcher = pattern.matcher(title)
        // Titles without a parseable "(yyyy)" tag fall into the -1 bucket.
        if (matcher.find()) (matcher.group(2).toInt, 1) else (-1, 1)
      })
      .reduceByKey((x, y) => x + y)
      .sortByKey()
    movieInfo.take(10).foreach(println(_))

    // BUGFIX: contains("") is true for every string, so the original counted
    // ALL movies; match the "(2000)" production-year tag instead, as the
    // printed label promises.
    val count2000 = moviesRDD
      .map(_.split("::")(1))
      .filter(title => title.contains("(2000)"))
      .count()
    println(s"2000年生产的电影数量:$count2000")
    sc.stop()
  }
}
