package com.zuoye.mysql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Exports pre-aggregated weather tables from Hive (`db_weather` database)
 * into MySQL over JDBC, appending to the corresponding target tables.
 */
object WeatherAnalysis {

  // JDBC connection settings for the target MySQL database.
  // NOTE(review): credentials are hard-coded — consider externalizing
  // them to configuration before production use.
  private val JdbcUrl      = "jdbc:mysql://192.168.79.105:3306/weatherdb"
  private val JdbcUser     = "root"
  private val JdbcPassword = "123456"

  def main(args: Array[String]): Unit = {

    // 1. Spark configuration — local[*] uses all available cores.
    val sparkConf = new SparkConf()
      .setMaster("local[*]")

    // 2. SparkSession with Hive support enabled so the dws_* tables can be read.
    val sparkSession = SparkSession
      .builder()
      .enableHiveSupport()
      .config(sparkConf)
      .appName("WeatherAnalysis")
      .getOrCreate()

    try {
      // 3. Hive source table -> MySQL target table pairs.
      val tableMappings = Seq(
        "dws_weather_count_table"         -> "weather_count_table",
        "dws_wind_weather_relation_table" -> "wind_weather_relation_table",
        "dws_air_quality_counts"          -> "air_quality_counts",
        "dws_weather_quality_index"       -> "weather_quality_index"
      )

      // 4. Read each Hive table and append its rows to the MySQL target.
      tableMappings.foreach { case (hiveTable, mysqlTable) =>
        sparkSession
          .sql(s"select * from db_weather.$hiveTable")
          .write
          .format("jdbc")
          .option("url", JdbcUrl)
          .option("user", JdbcUser)
          .option("password", JdbcPassword)
          .option("dbtable", mysqlTable)
          .mode(SaveMode.Append)
          .save()
      }
    } finally {
      // Release resources even if a write fails.
      // close() merely delegates to stop(), so a single stop() suffices.
      sparkSession.stop()
    }
  }

}
