package com.software.process.old

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

import java.time.LocalDate
import java.time.format.DateTimeFormatter

object Save_AQI {
  // Silence Spark's verbose INFO/WARN logging; only errors are printed.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Batch job: for every day in January 2022, reads the per-day city
   * air-quality CSV, keeps only the rows whose `type` column is 'AQI',
   * and appends them to the MySQL table `AirDB.all_AQI_M1`.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // 1. Spark configuration: local single-threaded master; the
    //    spark.testing.memory override allows running on low-memory machines.
    val conf = new SparkConf()
      .setAppName("SparkSqlCSVExample")
      .setMaster("local")
      .set("spark.testing.memory", "2147480000")

    // 2. Create the SparkSession ONCE, outside the date loop.
    //    (Previously it was built on every iteration; getOrCreate made that
    //    harmless but wasteful.)
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    // Inclusive date range 2022-01-01 .. 2022-01-31, formatted as yyyyMMdd
    // to match the source file names.
    val start = LocalDate.parse("20220101", DateTimeFormatter.BASIC_ISO_DATE)
    val end = LocalDate.parse("20220131", DateTimeFormatter.BASIC_ISO_DATE)
    val formatter = DateTimeFormatter.ofPattern("yyyyMMdd")

    try {
      for (epochDay <- start.toEpochDay to end.toEpochDay) {
        val fdate = LocalDate.ofEpochDay(epochDay).format(formatter)

        // 3. Load the per-day CSV. First row is the header; multiLine allows
        //    quoted cells that contain embedded newlines.
        val commentData: DataFrame = spark.read.format("csv")
          .option("header", true)
          .option("multiLine", true)
          .load("E:\\cities_20220101-20221231\\china_cities_" + fdate + ".csv")

        // 4. Register a temp view and select only the AQI measurement rows.
        commentData.createOrReplaceTempView("china_cities")
        val sqlresult: DataFrame =
          spark.sql("select * from china_cities where type = 'AQI' ")

        // 5. Append the filtered rows to the MySQL table AirDB.all_AQI_M1.
        //    NOTE(review): credentials are hard-coded here; move them to
        //    configuration or environment variables before publishing.
        sqlresult.write
          .format("jdbc")
          .option("url", "jdbc:mysql://localhost:3306/AirDB?serverTimezone=GMT%2B8")
          .option("driver", "com.mysql.cj.jdbc.Driver")
          .option("user", "root")
          .option("password", "lyf20020511")
          .option("dbtable", "all_AQI_M1")
          .mode(SaveMode.Append)
          .save()
      }
    } finally {
      // Release Spark resources; the original never stopped the session.
      spark.stop()
    }
  }
}
