package com.dataclean.lk

import org.apache.spark.sql.SparkSession

object PartClean {

  /**
   * Entry point. Reads the raw `ods.part_lk` table, removes duplicate rows,
   * and appends the result into the cleaned `dwd.part_lk` table via
   * Hive-backed Spark SQL.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("PartClean")
      .config("spark.sql.warehouse.dir", "hdfs://master:9000/user/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

//    Kept for reference: optional JDBC source for the CUSTOMER table.
//    val url = "jdbc:mysql://192.168.23.51/shtd_store?useSSL=false"
//
//    val jdbcDF = spark.read.format("jdbc").options(
//      Map(
//        "url" -> url,
//        "user" -> "root",
//        "password" -> "123456",
//        "dbtable" -> "CUSTOMER"
//      )
//    ).load()
//
//    jdbcDF.createOrReplaceTempView("customer_mysql")

    // Ensure the job is torn down cleanly even if a statement fails.
    try {
      // Resolve unqualified table names against the ODS database,
      // where the raw part_lk table lives.
      // NOTE: spark.sql replaces the deprecated spark.sqlContext.sql.
      spark.sql("use ods")

      // Deduplicate and append in a single statement; the original's
      // intermediate `select * from part_lk` temp view was redundant.
      spark.sql(
        """
          |insert into table dwd.part_lk
          |select
          |distinct
          |PARTKEY,
          |NAME,
          |MFGR,
          |BRAND,
          |TYPE,
          |SIZE,
          |CONTAINER,
          |RETAILPRICE,
          |COMMENT
          |from
          |part_lk
          |""".stripMargin)
    } finally {
      // Release the SparkContext and its cluster resources.
      spark.stop()
    }
  }

}
