package com.dataclean.lk

import org.apache.spark.sql.SparkSession

object LineitemClean {

  /**
   * Entry point for the lineitem cleaning job.
   *
   * Reads `ods.lineitem_lk`, removes duplicate rows, reformats the three
   * date columns as 'yyyy-MM-dd HH:mm:ss' strings, and overwrites
   * `dwd.lineitem_lk` with the result.
   *
   * NOTE(review): `date_format` produces a STRING, not a timestamp type,
   * despite the original comment's intent — confirm the target table
   * declares these columns as string-compatible.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("LineitemClean")
      .config("spark.sql.warehouse.dir","hdfs://master:9000/user/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

    // Ensure the session is stopped even if a query fails, so the
    // application's cluster resources are released.
    try {
      // Resolve unqualified table names against the ODS database.
      // SparkSession.sql is the direct API since Spark 2.0; the
      // sqlContext indirection is legacy.
      spark.sql("use ods")

      // Deduplicate the raw lineitem rows and stage them in a temp view.
      val hiveDF = spark.sql(
        """
          |select
          |distinct
          |*
          |from
          |lineitem_lk
          |""".stripMargin)

      hiveDF.createOrReplaceTempView("lineitem_lk_hive")

      // Reformat the three date columns and overwrite the DWD table.
      // NOTE(review): column names `lienumber`, `commentdate`, `shipstruct`
      // look like typos of the TPC-H names (linenumber, commitdate,
      // shipinstruct) but must match the declared schema of the source and
      // target tables — do not rename here without altering the tables.
      spark.sql(
        """
          |insert overwrite table dwd.lineitem_lk
          |(
          |select
          |orderkey,
          |partkey,
          |suppkey,
          |lienumber,
          |quantity,
          |extendedprice,
          |discount,
          |tax,
          |returnflag,
          |linestatus,
          |date_format(shipdate,'yyyy-MM-dd HH:mm:ss') as shipdate,
          |date_format(commentdate,'yyyy-MM-dd HH:mm:ss') as commentdate,
          |date_format(receiptdate,'yyyy-MM-dd HH:mm:ss') as receiptdate,
          |shipstruct,
          |shipmode,
          |comment
          |from lineitem_lk_hive
          |)
          |""".stripMargin)
    } finally {
      spark.stop()
    }
  }

}
