
import org.apache.spark.{SPARK_BUILD_DATE, SparkConf}
import org.apache.spark.sql.{SaveMode, SparkSession, functions}
import org.apache.spark.sql.functions.{col, to_date}
import org.apache.spark.sql.types.{DataType, DateType, IntegerType, StringType}


// Data cleaning and load into Hive
/**
 * ETL job: reads a raw produce CSV, parses/cleans it, casts columns to
 * typed values, and writes the result to the Hive table
 * `db_produce.tb_produce` (overwriting any existing data).
 */
object spark {
  def main(args: Array[String]): Unit = {
    // 1. Spark configuration — val, it is never reassigned.
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
    // 2. SparkSession with Hive support so saveAsTable targets the metastore.
    val sparksession = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .appName("Eight")
      .getOrCreate()
    val originPath = "/produce/data/produce.1734423516529.csv"
    // Header row present; every column is read as a string at this point.
    val df = sparksession.read.option("header", "true").csv(originPath)

    // Parse order_date from the "yyyy/M/d" input format, then drop any row
    // with a null in a required column. Rows whose date fails to parse
    // become null and are therefore dropped here as well.
    val dfWithCorrectDateAndCleaned = df
      .withColumn("order_date", to_date(col("order_date"), "yyyy/M/d"))
      .na.drop(cols = Array("order_date", "sales_region_code", "item_code", "first_cate_code", "second_cate_code", "sales_chan_name", "item_price", "ord_qty"))

    // Cast the cleaned string columns to their target types.
    // NOTE(review): casting item_price to IntegerType truncates fractional
    // prices — confirm the source data only holds whole-number prices.
    val table = dfWithCorrectDateAndCleaned.select(
      col("order_date").cast(DateType),
      col("sales_region_code").cast(IntegerType),
      col("item_code").cast(IntegerType),
      col("first_cate_code").cast(IntegerType),
      col("second_cate_code").cast(IntegerType),
      col("sales_chan_name").cast(StringType),
      col("item_price").cast(IntegerType),
      col("ord_qty").cast(IntegerType)
    )
    table.write.mode(SaveMode.Overwrite).saveAsTable("db_produce.tb_produce")

    // stop() fully shuts the session down; close() is just an alias for
    // stop(), so the original stop()+close() pair stopped it twice.
    sparksession.stop()
  }
}
