package com.software.process.old

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object Save0_23h {
  // Silence Spark/Hadoop internals: only ERROR-level messages from "org.*" loggers.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Batch job: reads `D:\csv\city_province.csv` into a DataFrame, registers it
   * as the temp view `china_cities`, selects all rows via Spark SQL, and
   * appends the result to the MySQL table `city_province` in database `AirDB`.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // 1. Spark configuration: local single-threaded master with a raised
    //    testing-memory floor (~2 GB) for small development machines.
    val conf = new SparkConf()
      .setAppName("SparkSqlCSVExample")
      .setMaster("local")
      .set("spark.testing.memory", "2147480000")

    // 2. SparkSession entry point.
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    try {
      // `val` (was `var`): the DataFrame reference is never reassigned.
      // `multiLine` allows quoted fields that span physical lines.
      val commentData: DataFrame = spark.read.format("csv")
        .option("header", true)
        .option("multiLine", true)
        .load("D:\\csv\\city_province.csv")

      // 3. Register a temporary view so the data can be queried with SQL.
      commentData.createOrReplaceTempView("china_cities")

      // 4. Run the query. (`stripMargin` removed: the literal is single-line
      //    with no '|' margin markers, so the call was a no-op.)
      //    Filtered-query example:
      //    select type,hour,`北京` from china_cities where type IN ('PM2.5','PM10','SO2','NO2','O3','CO')
      val sql = "select * from china_cities"
      val sqlresult: DataFrame = spark.sql(sql)

      // 5. Append the query result into MySQL.
      //    SECURITY NOTE(review): credentials are hard-coded in source; move
      //    user/password into configuration or environment variables before
      //    committing or sharing this code.
      sqlresult.write
        .format("jdbc")
        .option("url", "jdbc:mysql://localhost:3306/AirDB?serverTimezone=GMT%2B8")
        .option("driver", "com.mysql.cj.jdbc.Driver")
        .option("user", "root")
        .option("password", "lyf20020511")
        .option("dbtable", "city_province")
        .mode(SaveMode.Append)
        .save()
    } finally {
      // Release Spark resources deterministically; the original version
      // leaked the session until JVM shutdown.
      spark.stop()
    }
  }
}
