package com.bw.sparksql1.job2

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._

/**
  *
  *  案例实现1：通过日期获取星期
  */
/**
  * Demonstrates three ways to apply a Scala function as a Spark SQL UDF:
  * via the DataFrame column API, via `spark.udf.register` + SQL text, and
  * (placeholder) a third variant.
  */
object Job14 {

  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging for the demo.
    Logger.getLogger("org").setLevel(Level.ERROR)
    val spark = SparkSession
      .builder()
      .master("local")
      .appName("Spark SQL basic example")
      .getOrCreate()

    try {
      // Sanity-check the helper on a known date (2021-05-14 was a Friday).
      val str = getDayOfWeek("2021-05-14")
      println(str)

      // Way 1: wrap the function with `udf` and use the DataFrame column API.
      val usersDF = spark.read
        .format("csv")
        .option("header", "true")
        .option("delimiter", ",")
        .load("users.csv")
      val usersDF2 = usersDF
        .withColumn("join_week_day", udfDayOfWeek(col("joinedAt")))
        .drop("joinedAt")
      // NOTE: `withColumn` is lazy — without an action such as show(),
      // this branch of the demo would never actually execute.
      usersDF2.show()

      // Way 2: register the plain function by name so SQL text can call it.
      spark.udf.register("udfDayOfWeek", (s_date: String) => getDayOfWeek(s_date))
      usersDF.createOrReplaceTempView("users")
      spark.sql("select user_id,udfDayOfWeek(joinedAt) as week_day from users").show()

      // Way 3: (not yet implemented)
    } finally {
      // Release the local SparkSession even if one of the jobs above fails.
      spark.stop()
    }
  }

  // Plain Scala functions are not directly usable in Spark SQL; wrapping with
  // `udf` makes the function callable through the DataFrame column API.
  val udfDayOfWeek = udf((s_date: String) => getDayOfWeek(s_date))

  /**
    * Maps a date string (e.g. "2021-05-14", optional trailing time part) to a
    * weekday label, e.g. 2021-05-14 -> 星期5 (Friday). Returns "unknown" for
    * null or malformed input.
    *
    * Fix vs. the original: the regex check now runs BEFORE parsing. Previously
    * `SimpleDateFormat.parse` was called first, so malformed input threw
    * ParseException (and null threw NPE) instead of reaching "unknown".
    */
  def getDayOfWeek(s_date: String): String = {
    import java.util.Calendar
    // Index 0 corresponds to Calendar.SUNDAY (DAY_OF_WEEK == 1).
    val weeks = Array("星期7", "星期1", "星期2", "星期3", "星期4", "星期5", "星期6")
    val pattern = """([0-9]{4})-([0-9]{2})-([0-9]{2}).*""".r

    Option(s_date) match {
      case Some(pattern(_, _, _)) =>
        scala.util.Try {
          // SimpleDateFormat is not thread-safe, so create one per call.
          // Kept lenient (the default) to preserve the original's parsing
          // behavior for inputs like a trailing " 00:00:00" time part.
          val dt_format = new java.text.SimpleDateFormat("yyyy-MM-dd")
          val cal = Calendar.getInstance
          cal.setTime(dt_format.parse(s_date))
          // DAY_OF_WEEK ranges 1 (Sunday) .. 7 (Saturday); shift to 0-based.
          weeks(cal.get(Calendar.DAY_OF_WEEK) - 1)
        }.getOrElse("unknown")
      case _ => "unknown"
    }
  }

}