package com.example.client

import com.example.util.{JdbcUtil, SparkUtil}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
 * Loads every row of a Hive table into MySQL via a partition-wise JDBC upsert.
 *
 * Project: bigdata
 *
 * @author leali
 * @since 2022/5/10 20:45
 */
object LoadData {
  // Silence Spark's verbose INFO logging before the session starts up.
  Logger.getLogger("org.apache.spark").setLevel(Level.WARN)


  /**
   * Entry point: reads all rows of a Hive table and upserts them into MySQL
   * through [[JdbcUtil.upsertData]], one JDBC batch stream per Spark partition.
   *
   * @param args optional; `args(0)` overrides the source table name.
   *             With no arguments the previous hard-coded default
   *             `"t_sz_tong"` is used, so existing invocations are unchanged.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkUtil.initSpark(enableHive = true)
    spark.sparkContext.setLogLevel("WARN")
    try {
      val tableName: String = args.headOption.getOrElse("t_sz_tong")
      val frame: DataFrame = spark.sql(s"SELECT * FROM $tableName")
      // Computed on the driver; a plain Int serializes cheaply into the closure below.
      val fieldLength: Int = frame.schema.length
      // 10 partitions bounds the number of concurrent JDBC connections at 10.
      // The explicit Iterator[Row] annotation disambiguates the Scala overload
      // of foreachPartition from the Java ForeachPartitionFunction one.
      frame.repartition(10).foreachPartition((records: Iterator[Row]) => {
        JdbcUtil.upsertData(rows = records, tableName = tableName, fieldLength = fieldLength, batchSize = 10)
      })
    } finally {
      // Release the session even when the load fails, so the app doesn't leak
      // the driver/executors on error paths.
      spark.stop()
    }
  }
}
