package com.neuedu

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * 分析之后的数据同步到MySQL
 */
/**
 * Syncs analysis-result tables from Hive to MySQL.
 *
 * Reads four tables from the Hive database `vehicledb` and appends each
 * one to the MySQL table of the same name over JDBC.
 */
object hiveToMysqlOne {

  // JDBC connection settings shared by every table sync.
  // NOTE(review): credentials are hard-coded; move them to configuration
  // (e.g. spark-submit --conf / environment variables) before any
  // non-local deployment.
  private val JdbcUrl      = "jdbc:mysql://192.168.47.101:3306/vehicledb"
  private val JdbcUser     = "root"
  private val JdbcPassword = "123456"

  // Hive source tables; each MySQL target table uses the same name.
  private val Tables = Seq(
    "a_china_automobile_sales",
    "b_global_ev",
    "c_vehicle_production_sales",
    "d_china_charging_pile"
  )

  def main(args: Array[String]): Unit = {
    // 1. Spark configuration object (local[*] is for development runs).
    val sparkConf = new SparkConf()
      .setMaster("local[*]")

    // 2. SparkSession with Hive support enabled so spark.sql can read
    //    the Hive metastore tables.
    val sparkSession = SparkSession
      .builder()
      .enableHiveSupport()
      .config(sparkConf)
      .appName("VehicleDataSync")
      .getOrCreate()

    try {
      // 3–4. Read each Hive table and append its rows to MySQL.
      //      (Replaces four copy-pasted write stanzas with one helper.)
      Tables.foreach(table => syncTable(sparkSession, table))
    } finally {
      // 5. Release Spark resources even if one of the writes fails —
      //    the original fell through without stopping the session on error.
      sparkSession.stop()
    }
  }

  /**
   * Reads `vehicledb.<table>` from Hive and appends its rows to the
   * MySQL table of the same name via the JDBC data source.
   *
   * @param spark active SparkSession with Hive support
   * @param table table name, identical on the Hive and MySQL side
   */
  private def syncTable(spark: SparkSession, table: String): Unit = {
    val df = spark.sql(s"SELECT * FROM vehicledb.$table")
    df.write
      .format("jdbc")
      .option("url", JdbcUrl)
      .option("user", JdbcUser)
      .option("password", JdbcPassword)
      .option("dbtable", table)
      .mode(SaveMode.Append)
      .save()
  }
}

