
import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}
/**
 * Exports the analysed result tables from Hive to MySQL.
 */
object mysql {

  // JDBC connection settings for the target MySQL database.
  // NOTE(review): credentials are hard-coded; move to configuration/secret
  // management before running outside a local test environment.
  private val JdbcUrl = "jdbc:mysql://192.168.244.130:3306/producedb"
  private val JdbcUser = "root"
  private val JdbcPassword = "root"

  // Hive tables in db_produce to export; each is appended to the MySQL
  // table of the same name in producedb.
  private val Tables =
    Seq("age_num", "area_num", "code_num", "price_ptv_num", "price_ptv_tb")

  /**
   * Entry point: reads each analysed table from Hive (database db_produce)
   * and appends its rows to the identically named MySQL table.
   */
  def main(args: Array[String]): Unit = {
    // 1. Spark configuration object (local mode, all available cores).
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
    // 2. SparkSession with Hive support enabled so db_produce is visible.
    val sparkSession = SparkSession
      .builder()
      .enableHiveSupport()
      .config(sparkConf)
      .appName("mysql")
      .getOrCreate()
    try {
      // 3 & 4. Read every Hive table and export it to MySQL. The five
      // original copy-pasted write stanzas differed only in table name,
      // so they are collapsed into one loop.
      Tables.foreach { table =>
        sparkSession
          .sql(s"select * from db_produce.$table")
          .write
          .format("jdbc")
          .option("url", JdbcUrl)
          .option("user", JdbcUser)
          .option("password", JdbcPassword)
          .option("dbtable", table)
          .mode(SaveMode.Append) // append: keep any rows already in MySQL
          .save()
      }
    } finally {
      // Release Spark resources even if a read/write fails.
      // stop() is sufficient: SparkSession.close() is just an alias for it,
      // so the original stop()-then-close() double call was redundant.
      sparkSession.stop()
    }
  }
}