import org.apache.spark.SparkConf
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.sql.{SaveMode, SparkSession}
import util.dimDate
import java.time.LocalDate
import java.util.Properties

/**
 * Batch job that reads the Hive table `movies.movie` and materializes a set of
 * dashboard aggregates into MySQL: overall metrics, a current-year release
 * list, per-actor and per-director counts, a monthly release trend over the
 * last year, and genre / production-country breakdowns.
 *
 * Every output table is overwritten on each run (SaveMode.Overwrite).
 */
object movie_dw {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("movie_dw")
      .setMaster("local[*]") // local mode; override via spark-submit for cluster runs

    val spark = SparkSession
      .builder()
      .config(conf)
      .enableHiveSupport() // source table lives in the Hive metastore
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // JDBC sink configuration.
    // NOTE(review): credentials are hard-coded — move to config/env for anything
    // beyond local development. Also, Connector/J's charset property is normally
    // `characterEncoding`, not `charSet`; verify the URL parameter takes effect.
    val mysqlConf = new Properties()

    val url = "jdbc:mysql://leafdomain.cn:3306/movie?useUnicode=true&charSet=utf-8"
    mysqlConf.setProperty("url", url) // redundant (url is passed to jdbc() explicitly) but harmless
    mysqlConf.setProperty("user", "root")
    mysqlConf.setProperty("password", "123456")
    mysqlConf.setProperty("driver", "com.mysql.cj.jdbc.Driver")

    // The source table feeds every aggregation below, so cache it once.
    val movie = spark
      .table("movies.movie")
      .cache()

    /**
     * Overall metrics: movie count, average rating, average rating count,
     * average runtime.
     */
    movie
      .agg(
        count("*").as("movie_cnt"),
        avg($"rating").as("avg_rating"),
        avg("rating_count").as("avg_rat_cnt"),
        // runtime is an array column; take its first element and pull the
        // leading digit run (e.g. "142分钟" -> 142) before averaging.
        avg(regexp_extract($"runtime"(0), "(\\d+)", 1).cast(IntegerType)).as("avg_runtime")
      )
      .write
      .mode(SaveMode.Overwrite)
      .jdbc(url, "movie_main", mysqlConf)

    /**
     * Releases in the current year: (release date, title), ordered by date,
     * for a scrolling list widget.
     */
    movie
      .withColumn("release_date", explode($"release_date"))
      // substr is 1-based in Spark SQL; take the leading "yyyy" and compare
      // against the current year (string is implicitly cast for the comparison).
      .where($"release_date".substr(1, 4).equalTo(LocalDate.now().getYear))
      .select($"release_date", $"name")
      .orderBy($"release_date")
      .write
      .mode(SaveMode.Overwrite)
      .jdbc(url, "movie_list", mysqlConf)

    /**
     * Per-actor movie count and total rating count, for a word-cloud chart.
     */
    movie
      .withColumn("actors", explode($"actors"))
      .groupBy($"actors")
      .agg(
        count("*").as("atr_movie_cnt"),
        sum($"rating_count").as("atr_rating_cnt")
      )
      .select($"actors", $"atr_movie_cnt", $"atr_rating_cnt")
      .write
      .mode(SaveMode.Overwrite)
      .jdbc(url, "movie_actor", mysqlConf)

    /**
     * Per-director movie count and total rating count, for a word-cloud chart.
     */
    movie
      .withColumn("directors", explode($"directors"))
      .groupBy($"directors")
      .agg(
        count("*").as("dtr_movie_cnt"),
        sum($"rating_count").as("dtr_rating_cnt")
      )
      .select($"directors", $"dtr_movie_cnt", $"dtr_rating_cnt")
      .orderBy($"dtr_rating_cnt".desc)
      .write
      .mode(SaveMode.Overwrite)
      .jdbc(url, "movie_director", mysqlConf)

    // Build a date dimension covering the last twelve months ("yyyy-MM" keys),
    // so that months with zero releases still appear in the trend table.
    val currentYear = LocalDate.now().getYear
    val currentMonth = f"${LocalDate.now().getMonthValue}%02d" // zero-padded month
    val movie_date = spark
      .createDataFrame(
        dimDate(s"${currentYear - 1}-$currentMonth",
          s"$currentYear-$currentMonth")
      )
      .cache()

    /**
     * Monthly release count and average rating over the last year (line chart).
     */
    movie
      .withColumn("release_date", explode($"release_date"))
      .withColumn("release_date", regexp_extract($"release_date", "^(\\d{4}-\\d{2})", 1))
      // regexp_extract yields "" (not null) on no-match, so filter empty strings;
      // `=!= ""` also drops nulls because a null comparison is never true.
      .where($"release_date" =!= "")
      // Right join keeps every month of the dimension even without releases.
      .join(movie_date.as("D"), $"release_date" === $"D.date", "right")
      .groupBy($"date")
      .agg(
        // Count a left-side column (null for unmatched months) so empty months
        // report 0; count("*") would wrongly count the null-padded row as 1.
        count($"release_date").as("release_cnt"),
        avg($"rating").as("date_avg_rating")
      )
      .orderBy("date")
      .write
      .mode(SaveMode.Overwrite)
      .jdbc(url, "movie_date", mysqlConf)

    /**
     * Per-genre movie count and average rating (bar chart).
     */
    movie
      .withColumn("genres", explode($"genres"))
      .groupBy($"genres")
      .agg(
        count("*").as("genres_movie_cnt"),
        avg($"rating").as("genres_avg_rating")
      )
      .write
      .mode(SaveMode.Overwrite)
      .jdbc(url, "movie_genre", mysqlConf)

    /**
     * Per-country movie count and average rating (pie chart). All mainland
     * China variants (e.g. "中国大陆") are collapsed into "中国".
     */
    movie
      .withColumn("production_countries", explode($"production_countries"))
      .withColumn("production_countries",
        // substr is 1-based and character-aware, so this matches a leading "中国".
        when($"production_countries".substr(1, 2).equalTo("中国"), "中国")
          .otherwise($"production_countries")
      )
      .groupBy("production_countries")
      .agg(
        count("*").as("country_movie_cnt"),
        avg($"rating").as("country_avg_rating")
      )
      .write
      .mode(SaveMode.Overwrite)
      .jdbc(url, "movie_country", mysqlConf)

    spark.stop()
  }
}
