package com.neuedu.mysql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * 分析之后的数据同步到MySQL
 */
/**
 * Exports cardiovascular-disease age statistics from Hive to MySQL.
 *
 * Reads three pre-aggregated tables (overall, male, female) from the
 * `db_heart` Hive database and appends each to its corresponding table
 * in the `heartdb` MySQL database.
 */
object AgeInCardioPercent {

  // JDBC connection settings shared by all three exports.
  // NOTE(review): credentials are hard-coded; move them to configuration
  // (program args / properties file) before production use.
  private val JdbcUrl = "jdbc:mysql://192.168.170.101:3306/heartdb"
  private val JdbcUser = "root"
  private val JdbcPassword = "123456"

  def main(args: Array[String]): Unit = {
    // 1. Spark configuration: run locally on all available cores.
    val sparkConf = new SparkConf()
      .setMaster("local[*]")

    // 2. SparkSession with Hive support enabled so Hive tables can be queried.
    val sparkSession = SparkSession
      .builder()
      .enableHiveSupport()
      .config(sparkConf)
      .appName("AgeInCardioPercent")
      .getOrCreate()

    try {
      // 3. Hive source table -> MySQL target table mapping.
      val tables = Seq(
        "dws_cardio_age" -> "cardio_age",
        "dws_cardio_age_male" -> "cardio_age_male",
        "dws_cardio_age_female" -> "cardio_age_female"
      )

      // 4. Read each Hive table and append its rows to MySQL.
      tables.foreach { case (hiveTable, mysqlTable) =>
        val df = sparkSession.sql(s"select * from db_heart.$hiveTable")
        writeToMysql(df, mysqlTable)
      }
    } finally {
      // Release the session even if a read or write fails.
      // (stop() and close() are equivalent; calling both was redundant.)
      sparkSession.stop()
    }
  }

  /** Appends the given DataFrame to the named MySQL table over JDBC. */
  private def writeToMysql(df: DataFrame, table: String): Unit = {
    df.write
      .format("jdbc")
      .option("url", JdbcUrl)
      .option("user", JdbcUser)
      .option("password", JdbcPassword)
      .option("dbtable", table)
      .mode(SaveMode.Append)
      .save()
  }
}
