package dataframe

import org.apache.spark.SparkConf
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.{Dataset, SparkSession}

object DataFrame_FinalDemo06 {

  /** Entry point: builds a small Dataset of books whose `pubTime` strings use
   *  inconsistent separators (or are null), then shows the column normalized
   *  to yyyy-MM-dd via a UDF. The UDF is defined in two equivalent ways
   *  (inline lambda and eta-expanded method reference) for demonstration;
   *  both delegate to the single parsing method [[toDateUDF1]] instead of
   *  duplicating the logic.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[*]")
    conf.setAppName("DataFrame_FinalDemo06")

    val spark: SparkSession = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()
    import spark.implicits._

    // Sample data with deliberately inconsistent / missing date formats.
    val bookDS: Dataset[Book] = spark.sparkContext.parallelize(Seq(
      Book("浮生六记", "[清]沈复", "2018/7/1"),
      Book("云边有个小卖部", "张嘉佳", "2018/07/12"),
      Book("菊与刀", "[美]本尼迪克特", null),
      Book("苏菲的世界", "乔斯坦·贾德", "2017,10 12"),
      Book("罗生门", null, null)
    )).toDS()

    // Two ways to wrap the same method as a UDF; the parsing logic lives in
    // one place (toDateUDF1) rather than being copied into an inline lambda.
    val toDateUDF = udf((s: String) => toDateUDF1(s))
    val myFunction = udf(toDateUDF1 _)

    bookDS.withColumn(
      "pubTime",
      toDateUDF(bookDS("pubTime"))
    ).show()

    bookDS.withColumn(
      "111",
      myFunction(bookDS("pubTime"))
    ).show()

    spark.stop()
  }

  /** Normalizes a date string whose fields are separated by a space, '/' or
   *  ',' (e.g. "2018/7/1", "2017,10 12") into "yyyy-MM-dd" with zero-padded
   *  month and day.
   *
   *  Returns null for null input so the Spark UDF maps null -> null, matching
   *  the original behavior. Malformed input (fewer than three fields, or a
   *  non-numeric month/day) still throws, exactly as the original did.
   *
   *  @param s raw date string, possibly null
   *  @return ISO-style "yyyy-MM-dd" string, or null if `s` is null
   */
  def toDateUDF1(s: String): String =
    if (s == null) null
    else {
      val fields = s.split(" |/|,")
      // f-interpolator reproduces the original "%02d".format(...) padding.
      f"${fields(0)}-${fields(1).toInt}%02d-${fields(2).toInt}%02d"
    }

  /** Simple record for the demo; pubTime is a raw, possibly-null string. */
  case class Book(title: String, author: String, pubTime: String)

}
