package com.xl.competition.old.task1.f1

import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * Spark job that extracts the MySQL `part` table and loads it into the
 * Hive ODS layer as a dated partition.
 *
 * @author xl (created 2023/11/9 19:29:18, program: com.xl.competition)
 */
object DataLoadPart {

  /**
   * Entry point: extract the `part` table from MySQL over JDBC, create the
   * Hive ODS target table if absent, and overwrite one date partition.
   *
   * @param args optional; `args(0)` is the partition date (`yyyyMMdd`).
   *             Defaults to "20231108" when omitted, preserving the original
   *             hard-coded behavior.
   */
  def main(args: Array[String]): Unit = {
    // Run HDFS operations as root — the warehouse location requires it.
    System.setProperty("HADOOP_USER_NAME", "root")

    // Target partition date, now parameterized; no argument keeps old behavior.
    val dt: String = args.headOption.getOrElse("20231108")

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://master:9083")
      .getOrCreate()

    // Ensure the session is stopped even if extraction or load fails.
    try {
      // JDBC credentials.
      // NOTE(review): credentials are hard-coded in source — move them to a
      // config file / environment variables / secret store.
      val properties = new Properties()
      properties.setProperty("user", "root")
      properties.setProperty("password", "Abc123..")

      // Stage the MySQL table as a session-scoped view. createOrReplaceTempView
      // is safe to re-run within the same session, unlike createTempView.
      spark.read
        .jdbc("jdbc:mysql://192.168.0.1:3306/competition", "part", properties)
        .createOrReplaceTempView("tmp_part")

      // Create the ODS target table once; idempotent via IF NOT EXISTS.
      spark.sql(
        """
          |create table if not exists ods.part
          |(
          |    `partkey`     int,
          |    `NAME`        string,
          |    `mfgr`        string,
          |    `brand`       string,
          |    `TYPE`        string,
          |    `size`        string,
          |    `container`   string,
          |    `retailprice` decimal(12, 2),
          |    `times`       bigint
          |) PARTITIONED BY (`dt` STRING)
          |    ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
          |        NULL DEFINED AS ''
          |    LOCATION '/warehouse/competition/ods/ods_part/'
          |""".stripMargin)

      // Overwrite only the requested partition with the freshly staged rows.
      spark.sql(
        s"""
          |insert overwrite table ods.part partition (dt = '$dt')
          |select partkey,
          |       name,
          |       mfgr,
          |       brand,
          |       type,
          |       size,
          |       container,
          |       retailprice,
          |       times
          |from tmp_part
          |""".stripMargin)
    } finally {
      spark.stop()
    }
  }
}
