package DataAnalysis_fyp


import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{avg, col, desc, explode, split,round}

import java.util.Properties

object d5 {

  /**
   * Batch job: computes the average rating ("mark") per movie genre.
   *
   * Reads the pre-cleaned `clean` table from MySQL, explodes the
   * space-separated `class` column into one genre per row, averages the
   * rating per genre (rounded to 2 decimals), and writes the result to
   * the `r5` table, ordered by average rating descending.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据分析第一题")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC connection properties for MySQL.
    // NOTE(review): credentials are hard-coded; consider externalizing them.
    // `com.mysql.jdbc.Driver` is the legacy driver class — for MySQL
    // Connector/J 8.x the class is `com.mysql.cj.jdbc.Driver`; confirm the
    // connector version on the classpath.
    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    // Read the already-cleaned data set.
    val data = spark.read
      .jdbc("jdbc:mysql://192.168.40.110:3306/fyp?useSSL=false", "clean", conn)

    // Average rating per genre:
    // - `class` holds space-separated genres; explode into one row per genre.
    // - groupBy already yields one row per key, so no distinct() is needed.
    val result = data
      .withColumn("type", explode(split(col("class"), " ")))
      .groupBy("type")
      .agg(round(avg("mark"), 2).as("avg"))
      .orderBy(desc("avg"))

    result.show(50)

    // BUG FIX: the original called `.format("overwrite")`, which names a
    // (nonexistent) data source, not a save mode. Use `.mode("overwrite")`
    // so re-runs replace the `r5` table instead of failing.
    result.write
      .mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/fyp?useSSL=false", "r5", conn)

    spark.close()
  }

}
