package com.guchenbo.spark.sql

import org.apache.spark.sql.types._
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 *
 * @author guchenbo
 * @date 2024/6/26
 */
object PeopleDemo2 {

  /**
   * Reads people.csv with an explicit schema, then writes it back out both as
   * CSV files under output/ and as a managed table named "people".
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("demo")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Explicit schema so the CSV columns get proper types instead of
      // all-string inference (header=true only supplies column names).
      val schema = StructType(List(
        StructField("name", StringType),
        StructField("age", IntegerType),
        StructField("job", StringType),
        StructField("birth", DateType)
      ))

      // Malformed-record policy: PERMISSIVE (keep row, null out bad fields),
      // DROPMALFORMED (discard bad rows), or FAILFAST (throw on first bad row).
      val dataMassCheck = "PERMISSIVE" // was a var; never reassigned

      // FIX: the original called sr.schema(schema) as a separate statement and
      // silently discarded the returned reader, relying on DataFrameReader's
      // internal mutability. Chaining the call makes the schema unambiguously
      // part of the reader that load() uses.
      val df = spark.read
        .option("sep", ";")
        .option("encoding", "UTF-8")
        .option("header", "true")
        .option("escape", "\"")
        .option("mode", dataMassCheck)
        .schema(schema)
        .format("csv")
        .load(path("people.csv"))

      df.write.format("csv").mode(SaveMode.Overwrite).save("output/people.csv")
      df.write.format("csv").mode(SaveMode.Overwrite).saveAsTable("people")
    } finally {
      // FIX: stop the session so local Spark resources are released even when
      // a read/write step throws. (The unused `sc = spark.sparkContext` local
      // from the original was removed.)
      spark.stop()
    }
  }

  /** Resolves a bare file name to its path under this module's resources directory. */
  def path(s: String): String =
    s"spark-sql/src/main/resources/$s"
}
