package com.software.process.old

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

import java.util.Properties

object CalAllYear {

  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Batch job: loads the `allyear` table from MySQL, computes the per-(date, type)
   * average of the `BeiJing` column rounded to 2 decimals, and appends the result
   * to the `year_BeiJing` table.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Build the SparkSession directly (the previous standalone SparkConf was
    // never passed to the builder, so it has been removed as dead code).
    // spark.testing.memory is raised so the driver passes Spark's
    // minimum-memory check on small machines.
    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName("CalAllYear")
      .config("spark.testing.memory", "2147480000")
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")

    // NOTE(review): credentials are hardcoded — move to configuration or
    // environment variables before this leaves a local dev setup.
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "lyf20020511")
    // NOTE(review): this URL uses database `airDB` while the writer below uses
    // `AirDB`; MySQL database names are case-sensitive on Linux — confirm they
    // refer to the same database.
    val url = "jdbc:mysql://localhost:3306/airDB?serverTimezone=GMT%2B8&characterEncoding=utf8&useSSL=true"
    val sourceData: DataFrame = spark.read.jdbc(url, "allyear", properties)

    // BUG FIX: the original code queried `china_cities`, a table that was never
    // registered with this SparkSession (the loaded DataFrame was ignored), so
    // spark.sql threw AnalysisException ("Table or view not found"). Register
    // the JDBC data as a temp view and query it. The trailing semicolon was
    // also removed — spark.sql rejects it as a parse error on older versions.
    sourceData.createOrReplaceTempView("allyear")

    val sql =
      """SELECT date, type,
        |       ROUND(AVG(CAST(BeiJing AS FLOAT)), 2) AS average
        |FROM allyear
        |GROUP BY date, type""".stripMargin
    val sqlResult: DataFrame = spark.sql(sql)

    sqlResult.show()

    // Append the aggregated averages to MySQL table `year_BeiJing`.
    sqlResult.write
      .format("jdbc")
      .option("url", "jdbc:mysql://localhost:3306/AirDB?serverTimezone=GMT%2B8")
      .option("driver", "com.mysql.cj.jdbc.Driver")
      .option("user", "root")
      .option("password", "lyf20020511")
      .option("dbtable", "year_BeiJing")
      .mode(SaveMode.Append)
      .save()
  }
}
