package com.qdu.data2

import org.apache.spark.sql.SaveMode

/** Task 9: for every (province, year) in the HDFS monthly-average-temperature
  * CSV, compute the coldest and hottest monthly average, show a sample, and
  * overwrite the results into the MySQL table `spark.task9`.
  */
object Coldest_Hottes_Month_task9 {
  def main(args: Array[String]): Unit = {
    import org.apache.spark.sql.SparkSession

    // Hive support is enabled so the SQL layer matches the cluster's metastore
    // configuration. NOTE(review): master/metastore hostnames are hard-coded.
    val spark = SparkSession.builder()
      .appName("Coldest_Hottes_Month_task9")
      .master("spark://niit-master:7077")
      .config("hive.metastore.uris", "thrift://niit-master:9083")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // BUG FIX: without inferSchema every CSV column loads as STRING, so
      // MIN()/MAX() over month_avg_temperature compared values
      // lexicographically (e.g. "9.5" > "10.2"), producing wrong coldest /
      // hottest months. inferSchema makes the temperature column numeric.
      val csvDF = spark.read
        .format("csv")
        .option("header", true)
        .option("inferSchema", true)
        .load("hdfs://niit-master/spark/Month_Average_Temperature.csv")

      // BUG FIX: createTempView throws AnalysisException if the view already
      // exists (e.g. on re-run in a shared session); use the replace variant.
      csvDF.createOrReplaceTempView("month_avg_temperature")

      // Coldest and hottest monthly average per province and year.
      // The alias "hot240" looks like a typo for "hot", but it is kept as-is:
      // the downstream MySQL table may already depend on that column name.
      val query =
        """
          |SELECT m.province, m.year,
          |       MIN(m.month_avg_temperature) cold,
          |       MAX(m.month_avg_temperature) hot240
          |    FROM month_avg_temperature m
          |    GROUP BY m.province, m.year
          |""".stripMargin

      val df = spark.sql(query)
      df.show(50, true)

      // Persist results, replacing any previous run's output.
      // NOTE(review): credentials are hard-coded, and com.mysql.jdbc.Driver is
      // the legacy Connector/J 5.x class name (8.x uses
      // com.mysql.cj.jdbc.Driver) — left unchanged to match the cluster's
      // installed driver.
      df.write.format("jdbc")
        .option("url", "jdbc:mysql://niit-master:3306/spark")
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "task9")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      // BUG FIX: release cluster resources even when the job fails; the
      // original never called stop().
      spark.stop()
    }
  }
}
