package com.spark.cust.movie

import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

/**
 * @description:
 * @time: 2020/12/4 12:47
 * @author: lhy
 */
object Code04_MovieType {
    /**
     * Counts occurrences of movie types from a MovieLens-style `movies.dat`
     * (format: MovieID::Title::Genres, genres separated by '|'), appends the
     * counts to a MySQL table, then echoes them back via Spark SQL.
     */
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession.builder().appName("MovieType").master("local").getOrCreate()
        import spark.implicits._

        // Result schema: one row per movie type with its occurrence count.
        val fields: Array[StructField] = Array(StructField("movieType", StringType, nullable = true),
                                               StructField("number", IntegerType, nullable = true))
        val schema: StructType = StructType(fields)
        val moviesFile: RDD[String] = spark.sparkContext.textFile("input/movie/movies.dat")

        // NOTE(review): only the FIRST genre of each movie is counted (text before
        // the first '|'); if every genre should be counted, flatMap over
        // split("\\|") instead — confirm intent before changing.
        val movieType: RDD[Row] = moviesFile
            .map(_.split("::")(2))
            .map { genres =>
                val sep: Int = genres.indexOf('|')
                val firstGenre: String = if (sep > 0) genres.substring(0, sep) else genres
                (firstGenre, 1)
            }
            .reduceByKey(_ + _)
            .sortBy(_._2, ascending = false)
            .map { case (genre, count) => Row(genre, count) }
        val movieTypeDF: DataFrame = spark.createDataFrame(movieType, schema)

        // JDBC connection properties.
        // NOTE(review): credentials are hard-coded; move to configuration for
        // anything beyond a local exercise.
        val prop = new Properties()
        prop.put("user", "root")
        prop.put("password", "bigdata")
        prop.put("driver", "com.mysql.jdbc.Driver")
        // Append the counts to table movie_type in the `spark` database.
        movieTypeDF.write.mode("append").jdbc("jdbc:mysql://192.168.21.104:3306/spark", "spark.movie_type", prop)

        // Must be registered as a temp view before it can be queried below.
        movieTypeDF.createOrReplaceTempView("movie_type")
        val results: DataFrame = spark.sql("select movieType,number from movie_type")
        // s-interpolation avoids the deprecated any2stringadd (Any + String) implicit.
        results.map(attributes => s"${attributes(0)} ${attributes(1)}").show()

        // Release the local Spark context (was previously leaked).
        spark.stop()
    }
}
