package com.xl.competition.old.readMysql2Hive

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

import java.util.Properties

/**
 * Author: xl
 * Date: 2022/3/4 15:01
 * program: com.xl.competition
 * Email: 2199396150@qq.com
 */
object Mysql2Hive {

  /**
   * Copies one MySQL table into a partitioned Hive table (Overwrite mode).
   *
   * Expected arguments:
   *   args(0) - source MySQL table name (read from the `bigdata` database over JDBC)
   *   args(1) - target Hive table name (written as `bigdata.<name>`)
   *   args(2) - partition column used for dynamic partitioning
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usable message instead of an ArrayIndexOutOfBoundsException.
    require(
      args.length >= 3,
      "usage: Mysql2Hive <mysqlTable> <hiveTable> <partitionColumn>"
    )

    val mysqlTable: String = args(0)
    val hiveTable: String = args(1)
    val partitionCol: String = args(2)

    // Must be SET before the session touches HDFS so writes run as this user.
    // The original called System.getProperty(key, default), which only reads
    // the property and discards the default — the user was never actually set.
    System.setProperty("HADOOP_USER_NAME", "xxl")

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .enableHiveSupport()
      // "nonstrict" allows dynamic-partition inserts without a static partition spec.
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .getOrCreate()

    try {
      val properties = new Properties()
      // NOTE(review): hard-coded credentials — move to configuration / a secret store.
      properties.put("user", "root")
      properties.put("password", "Abc123..")

      val frame: DataFrame =
        spark.read.jdbc("jdbc:mysql://192.2.41.100:3306/bigdata", mysqlTable, properties)

      // Quick sanity peek at the first rows before writing.
      frame.show(10)

      frame.write
        .mode(SaveMode.Overwrite)
        .partitionBy(partitionCol)
        .saveAsTable(s"bigdata.$hiveTable")
    } finally {
      // Always release the session, even when the read or write fails.
      spark.stop()
    }
  }
}
