package com.qdu.data2

import org.apache.spark.sql.SaveMode

/**
 * Spark batch job: computes per-province, per-year average temperatures for each
 * season from a monthly-average CSV on HDFS, then overwrites the result into the
 * MySQL table `spark.task7`.
 *
 * Seasons (by month number): spring = 3-5, summer = 6-8, autumn = 9-11,
 * winter = 12, 1, 2. Averages are rounded to 2 decimal places.
 */
object Season_avg_temperature_task7 {
  def main(args: Array[String]): Unit = {
    import org.apache.spark.sql.SparkSession

    // NOTE(review): master URL and metastore URI are hard-coded for this cluster;
    // consider supplying them via spark-submit for portability.
    val spark = SparkSession.builder()
      .appName("Season_avg_temperature_task7")
      .master("spark://niit-master:7077")
      .config("hive.metastore.uris", "thrift://niit-master:9083")
      .enableHiveSupport()
      .getOrCreate()

    // inferSchema makes `month` numeric and `month_avg_temperature` double, so the
    // SQL below compares and averages real numbers instead of relying on implicit
    // string casts.
    val csvPath = "hdfs://niit-master/spark/Month_Average_Temperature.csv"
    val csvDF = spark.read
      .format("csv")
      .option("header", "true")
      .option("inferSchema", "true")
      .load(csvPath)

    // createOrReplaceTempView is idempotent: createTempView would throw if the
    // view already exists in this session (e.g. on re-run in a shared session).
    csvDF.createOrReplaceTempView("month_avg_temperature")

    // CASE with no ELSE yields NULL for out-of-season rows; AVG ignores NULLs,
    // so each seasonal column averages only its own months.
    val seasonalAvgSql =
      """
        |SELECT m1.province, m1.year ,
        |		round(AVG(
        |		CASE when month >=3 and month <=5 THEN month_avg_temperature
        |		END),2) spring ,
        |		round(AVG(
        |		CASE when month >=6 and month <=8 THEN month_avg_temperature
        |		END),2) summer ,
        |		round(AVG(
        |		CASE when month >=9 and month <=11 THEN month_avg_temperature
        |		END),2) autumn ,
        |		round(AVG(
        |		CASE when month =12 or month =1 or month =2 THEN month_avg_temperature
        |		END),2) winter
        |FROM month_avg_temperature m1
        |group by m1.year ,m1.province
        |order by m1.year
        |""".stripMargin

    val df = spark.sql(seasonalAvgSql)
    df.show(50, true)

    // Overwrite drops and recreates the target table on every run.
    // NOTE(review): com.mysql.jdbc.Driver is the Connector/J 5.x class; if the
    // cluster ships Connector/J 8.x, switch to com.mysql.cj.jdbc.Driver.
    df.write.format("jdbc")
      .option("url", "jdbc:mysql://niit-master:3306/spark")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("user", "root")
      .option("password", "root")
      .option("dbtable", "task7")
      .mode(SaveMode.Overwrite)
      .save()

    // Release the cluster resources held by this application.
    spark.stop()
  }
}
