package spark.mgworkplace.job

import java.util.Date
import java.text.SimpleDateFormat

import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import spark.mgworkplace.job.param.ParamKit._
import spark.mgworkplace.job.udf.UdfKit._

/**
  *
  * @author com.ymy.hadoop   
  * @since 2019/6/14 22:26
  */
/**
  * Batch job: for each vehicle (ObjectID), sums the travel time that falls inside
  * the configured morning-peak window, restricted to trips that start and stop
  * within the configured month and on the same calendar day.
  *
  * Reads the `travel` table over JDBC, registers the project UDFs
  * (`limitYmdTime`, `diffTimeByTimestamp`) and runs a single aggregation query.
  */
object StatMorningPeakTimeJob extends Logging with Job {

  // Timestamp pattern used for the start/end execution log lines.
  private val LogTimeFormat = "yyyy-MM-dd HH:mm:ss:S"

  def main(args: Array[String]): Unit = {

    logWarning("开始加载: " + this.getClass.getSimpleName)

    // NOTE(review): master is hard-coded to local[2]; for cluster deployment this
    // should come from spark-submit instead — confirm intended usage.
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getSimpleName)
      .master("local[2]")
      .getOrCreate()

    // NOTE(review): credentials are hard-coded; consider externalizing to config.
    val jdbcDF = spark.read
      .format("jdbc")
      .option("url", "jdbc:mysql://cm01:3306/test?useSSL=false")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("dbtable", "travel")
      .option("user", "root")
      .option("password", "123")
      .load()
      .cache() // cached although only one action follows; harmless but not required

    jdbcDF.createOrReplaceTempView("obd_TravelInfo")
    spark.udf.register("limitYmdTime", limitYmdTime _)
    spark.udf.register("diffTimeByTimestamp", diffTimeByTimestamp _)

    /**
      * Aggregation query. The UDF handles the peak-window boundary cases that a
      * plain SQL expression such as
      *   cast(SUM((unix_timestamp(StopTime) - unix_timestamp(StartTime)))/3600 as decimal(10,2))
      * would not.
      *
      * Fixes vs. the original:
      *  - added the missing GROUP BY ObjectID (required because ObjectID is
      *    selected alongside SUM; without it Spark throws an AnalysisException)
      *  - dropped the redundant "== true" boolean comparisons
      *  - use standard SQL "=" instead of "==" for the datediff equality
      */
    val sql: String =
      s"""
         |select
         |  ObjectID,
         |  SUM(diffTimeByTimestamp(StartTime,StopTime,'${morningPeakBeginTimeStr}','${morningPeakEndTimeStr}')) diffTime
         |from
         |  obd_TravelInfo
         |where
         |  limitYmdTime(StartTime,'${monthBeginDateStr}','${monthEndDateStr}')
         |and
         |  limitYmdTime(StopTime,'${monthBeginDateStr}','${monthEndDateStr}')
         |and
         |  datediff(StopTime,StartTime) = 0
         |group by
         |  ObjectID
         |""".stripMargin

    logWarning(this.getClass.getSimpleName + "Execute Start Time : " + new SimpleDateFormat(LogTimeFormat).format(new Date))
    spark.sql(sql).show(1000, false)
    logWarning(this.getClass.getSimpleName + "Execute End Time : " + new SimpleDateFormat(LogTimeFormat).format(new Date))

    spark.close()
  }
}
