package com.qdu.data2

import org.apache.spark.sql.SaveMode

object Province_avg_temperature_task6 {

  /** Task 6: compute the average temperature per (year, province) from a CSV
    * on HDFS and overwrite the MySQL table `spark.task6` with the result.
    *
    * Input : hdfs://niit-master/spark/Year_Average_Temperature.csv (with header)
    * Output: JDBC table `task6` on niit-master:3306, database `spark`
    */
  def main(args: Array[String]): Unit = {
    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("Province_avg_temperature_task6")
      .master("spark://niit-master:7077")
      .config("hive.metastore.uris", "thrift://niit-master:9083")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Header row supplies column names (year, province, year_avg_temperature).
      // A single path is loaded directly; no need to wrap it in a Seq.
      val csvDF = spark.read
        .format("csv")
        .option("header", true)
        .load("hdfs://niit-master/spark/Year_Average_Temperature.csv")

      // createOrReplaceTempView is idempotent; plain createTempView throws if
      // the view already exists (e.g. on re-run within a shared session).
      csvDF.createOrReplaceTempView("year_avg_temperature")

      // Average temperature per (province, year), rounded to 2 decimal places.
      val query =
        """
          |SELECT y.year ,y.province ,round(AVG(y.year_avg_temperature),2) temperature
          |FROM year_avg_temperature y
          |group by y.year ,y.province
          |order by y.province , y.year
          |""".stripMargin

      val df = spark.sql(query)
      df.show(50, true)

      // NOTE(review): credentials are hard-coded; move them to configuration or
      // a secret store. Also "com.mysql.jdbc.Driver" is the legacy Connector/J
      // class name — Connector/J 8+ uses "com.mysql.cj.jdbc.Driver"; confirm
      // which connector jar is deployed before changing it.
      df.write.format("jdbc")
        .option("url", "jdbc:mysql://niit-master:3306/spark")
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "task6")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      // Always release executors and the cluster connection, even on failure.
      spark.stop()
    }
  }
}
