package com.xzx.spark.tuning.utils

import com.xzx.spark.tuning.bean.{School, Student}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

import java.util.Random

/**
 * Utility for bootstrapping the demo Hive tables used by the Spark tuning
 * examples: loads the raw JSON logs and (re)creates the partitioned and
 * bucketed `sparktuning.*` tables.
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2022-02-25 6:44 PM
 */
object InitUtil {

  // Base directory holding the raw JSON log files. NOTE(review): machine-local
  // path — adjust (or pass via args) when running anywhere else.
  private val InputDir = "file:///Users/xinzhixuan/work/git2/spark3-study/input"

  /**
   * Entry point: builds a local SparkSession with Hive support, (re)creates
   * the partitioned and bucketed demo tables from the raw JSON logs, then
   * shuts the session down.
   */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setAppName("InitData")
      .setMaster("local[*]") //TODO 要打包提交集群执行，注释掉 (comment out when submitting to a cluster)
    val sparkSession: SparkSession = initSparkSession(sparkConf)
    initHiveTable(sparkSession)
    initBucketTable(sparkSession)
    // saveData(sparkSession)
    sparkSession.close()
  }

  /**
   * Reads one JSON log file from [[InputDir]] and overwrites `table` as a
   * parquet Hive table partitioned by (dt, dn).
   */
  private def overwritePartitionedTable(sparkSession: SparkSession,
                                        logFile: String,
                                        table: String): Unit = {
    sparkSession.read.json(s"$InputDir/$logFile")
      .write.partitionBy("dt", "dn")
      .format("parquet")
      .mode(SaveMode.Overwrite)
      .saveAsTable(table)
  }

  /**
   * Reads one JSON log file from [[InputDir]] and overwrites `table` as a
   * parquet Hive table partitioned by (dt, dn) and bucketed + sorted on
   * `orderid`. `buckets` defaults to the original hard-coded value of 5.
   */
  private def overwriteBucketedTable(sparkSession: SparkSession,
                                     logFile: String,
                                     table: String,
                                     buckets: Int = 5): Unit = {
    sparkSession.read.json(s"$InputDir/$logFile")
      .write.partitionBy("dt", "dn")
      .format("parquet")
      .bucketBy(buckets, "orderid")
      .sortBy("orderid")
      .mode(SaveMode.Overwrite)
      .saveAsTable(table)
  }

  /** Creates the three plain partitioned tables from the raw JSON logs. */
  def initHiveTable(sparkSession: SparkSession): Unit = {
    overwritePartitionedTable(sparkSession, "coursepay.log", "sparktuning.course_pay")
    overwritePartitionedTable(sparkSession, "salecourse.log", "sparktuning.sale_course")
    overwritePartitionedTable(sparkSession, "courseshoppingcart.log", "sparktuning.course_shopping_cart")
  }

  /**
   * Creates the bucketed variants of course_pay and course_shopping_cart
   * (both bucketed and sorted on `orderid` into 5 buckets, so joins on
   * `orderid` can avoid a shuffle).
   */
  def initBucketTable(sparkSession: SparkSession): Unit = {
    overwriteBucketedTable(sparkSession, "coursepay.log", "sparktuning.course_pay_cluster")
    overwriteBucketedTable(sparkSession, "courseshoppingcart.log", "sparktuning.course_shopping_cart_cluster")
  }

  /**
   * Builds a Hive-enabled SparkSession from `sparkConf`, points the Hadoop
   * client at the local HDFS namenode, and switches to the `sparktuning`
   * database.
   *
   * @param sparkConf configuration supplied by the caller (app name, master)
   * @return a session with Hive support enabled and `sparktuning` selected
   */
  def initSparkSession(sparkConf: SparkConf): SparkSession = {
    // Identity used for HDFS permission checks when writing table data.
    System.setProperty("HADOOP_USER_NAME", "xinzhixuan")
    val sparkSession = SparkSession.builder().config(sparkConf)
      // .config("hive.metastore.uris", "thrift://db2.statis.txdev:9083") // remote Hive metastore (disabled)
      .enableHiveSupport() // required so saveAsTable targets the Hive catalog
      .getOrCreate()
    // Point the Hadoop client at the local namenode; for an HA cluster this
    // would be the nameservice URI instead.
    val hadoopConfiguration = sparkSession.sparkContext.hadoopConfiguration
    hadoopConfiguration.set("fs.defaultFS", "hdfs://localhost:9820")

    sparkSession.sql("use sparktuning")
    sparkSession
  }

  /**
   * Appends one million synthetic Student and School rows (random scores /
   * partition ids) into the test tables. One Random per partition keeps the
   * generator off the driver and avoids serializing it to executors.
   */
  def saveData(sparkSession: SparkSession): Unit = {
    import sparkSession.implicits._
    sparkSession.range(1000000).mapPartitions(partitions => {
      val random = new Random()
      partitions.map(item => Student(item, "name" + item, random.nextInt(100), random.nextInt(100)))
    }).write.partitionBy("partition")
      .mode(SaveMode.Append)
      .saveAsTable("sparktuning.test_student")

    sparkSession.range(1000000).mapPartitions(partitions => {
      val random = new Random()
      partitions.map(item => School(item, "school" + item, random.nextInt(100)))
    }).write.partitionBy("partition")
      .mode(SaveMode.Append)
      .saveAsTable("sparktuning.test_school")
  }

  /**
   * Blocks the calling thread forever, e.g. to keep the Spark UI alive for
   * inspection. Sleeps instead of busy-spinning so it does not pin a CPU core.
   */
  def hangOn(): Unit = {
    while (true) {
      Thread.sleep(1000)
    }
  }
}
