package com.qdu.data1

import org.apache.spark.sql.SaveMode

object Duration_rate_task2 {

  /** Spark batch job: for each `duration_time` in the Douyin dataset, computes
    * the finish rate and like rate (rounded to 4 decimals), keeps only groups
    * with at least one like and one finish, and overwrites MySQL table `task2`.
    */
  def main(args: Array[String]): Unit = {

    import org.apache.spark.sql.SparkSession
    val spark = SparkSession.builder()
      .appName("Duration_rate_task2")
      .master("spark://niit-master:7077")
      .config("hive.metastore.uris", "thrift://niit-master:9083")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // inferSchema makes finish/like numeric columns, so sum()/count()
      // aggregate real numbers instead of relying on implicit string casts.
      val csvDF = spark.read
        .format("csv")
        .option("header", true)
        .option("inferSchema", true)
        .load("hdfs://niit-master/spark/douyin_dataset.csv")

      // createOrReplaceTempView: createTempView throws AnalysisException if
      // the view already exists (e.g. when re-run in the same session).
      csvDF.createOrReplaceTempView("DY")

      // `like` is a SQL keyword in Spark SQL; backticks force it to be
      // parsed as a column identifier.
      val query =
        """
          |select duration_time,
          |       round(sum(finish) / count(*), 4) as finish_rate,
          |       round(sum(`like`) / count(*), 4) as like_rate
          |from DY
          |group by duration_time
          |having sum(`like`) != 0 and sum(finish) != 0
          |order by duration_time
          |""".stripMargin

      val df = spark.sql(query)
      df.show(truncate = true)

      // NOTE(review): credentials and the JDBC URL are hard-coded; consider
      // externalizing them (args / config). `com.mysql.jdbc.Driver` is the
      // legacy Connector/J 5.x class — confirm the driver jar on the cluster.
      df.write.format("jdbc")
        .option("url", "jdbc:mysql://localhost:3306/spark")
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "task2")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      // Always release the cluster resources, even if the job fails.
      spark.stop()
    }
  }

}
