package com.qdu.data1

import org.apache.spark.sql.{SaveMode}

object Popular_music_task5 {

  /** Entry point: reads the Douyin CSV dataset from HDFS, computes the
    * top-100 music tracks ranked by the number of distinct videos
    * (`item_id`) using each track, and overwrites the result into the
    * MySQL table `spark.task5`.
    *
    * Fixes over the previous version:
    *   - the SparkSession is now stopped in a `finally` block so cluster
    *     resources are released even when the job fails;
    *   - `createOrReplaceTempView` replaces `createTempView`, which throws
    *     if the view already exists in the session;
    *   - the single input path is a plain String (the old Seq + mkString
    *     was a no-op for one file).
    */
  def main(args: Array[String]): Unit = {
    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("Popular_music_task5")
      .master("spark://niit-master:7077")
      .config("hive.metastore.uris", "thrift://niit-master:9083")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Single CSV file on HDFS; header row supplies the column names.
      val csvPath = "hdfs://niit-master/spark/douyin_dataset.csv"
      val csvDF = spark.read
        .format("csv")
        .option("header", true)
        .load(csvPath)

      // Replace-if-exists so the job can be re-run inside one session.
      csvDF.createOrReplaceTempView("DY")

      // Top 100 tracks by distinct-video usage count.
      val query =
        """
          |select music_id, count(DISTINCT item_id) as num
          |from DY
          |group by music_id
          |order by num desc
          |limit 100
          |""".stripMargin

      val df = spark.sql(query)
      // Named arg: the boolean is `truncate`, not a row count.
      df.show(truncate = true)

      df.write.format("jdbc")
        .option("url", "jdbc:mysql://niit-master:3306/spark")
        // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J
        // class; 8.x jars use com.mysql.cj.jdbc.Driver — confirm which jar
        // is deployed on the cluster before changing this.
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "task5")
        .mode(SaveMode.Overwrite)
        .save()

      println("结果保存成功")
    } finally {
      // Always tear down the session so executors/ports are freed.
      spark.stop()
    }
  }
}
