package com.qdu.data2

import org.apache.spark.sql.SaveMode

/**
 * Task 10: compute the yearly average temperature for the cities
 * 青岛市 (Qingdao) and 北京市 (Beijing) from a day-level CSV on HDFS,
 * then overwrite the result into the MySQL table `spark.task10`.
 */
object QD_temperature_task10 {
  def main(args: Array[String]): Unit = {
    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("QD_temperature_task10")
      .master("spark://niit-master:7077")
      .config("hive.metastore.uris", "thrift://niit-master:9083")
      .enableHiveSupport()
      .getOrCreate()

    // Ensure cluster resources are released even if the job fails mid-way.
    try {
      val csvFile = Seq("hdfs://niit-master/spark/Day_Average_Temperature.csv")
      // NOTE(review): no inferSchema, so every column is read as String;
      // AVG below relies on Spark's implicit string->double cast — confirm
      // the CSV's temperature column is always numeric.
      val csvDF = spark.read.format("csv").option("header", true).load(csvFile.mkString(","))
//    Alternative source: read the same data from MySQL instead of HDFS.
//    val csvDF = spark.read
//      .format("jdbc")
//      .option("url", "jdbc:mysql://niit-master:3306/spark")
//      .option("driver", "com.mysql.jdbc.Driver")
//      .option("user", "root")
//      .option("password", "root")
//      .option("dbtable", "day_avg_temperature")
//      .load()

      // createOrReplaceTempView: createTempView would throw if the view
      // already exists in a reused session returned by getOrCreate().
      csvDF.createOrReplaceTempView("day_avg_temperature")

      // Yearly average per city; SUBSTRING_INDEX(date,'-',1) extracts the
      // year from a 'yyyy-MM-dd'-style string, rounded to 2 decimals.
      val result =
        """
          |SELECT d.city ,SUBSTRING_INDEX(d.date,'-',1) year ,round(AVG(d.day_avg_temperature),2) avgt
          |    FROM day_avg_temperature d
          |    where   d.city ='青岛市' or d.city = '北京市'
          |    GROUP BY d.city ,SUBSTRING_INDEX(d.date,'-',1)
          |    ORDER BY year , d.city
          |""".stripMargin

      val df = spark.sql(result)
      df.show(50, true)

      // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J 5.x
      // class name (8.x uses com.mysql.cj.jdbc.Driver) — kept as-is to match
      // whatever connector jar is on the cluster classpath; verify.
      df.write.format("jdbc")
        .option("url", "jdbc:mysql://niit-master:3306/spark")
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "task10")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      // Previously missing: without stop() the driver leaks the session and
      // keeps executors allocated on the standalone cluster.
      spark.stop()
    }
  }
}
