package com.lenovo.userprofile

import java.text.SimpleDateFormat
import java.util.{Calendar, Date, GregorianCalendar, TimeZone}
import com.lenovo.function.Utils
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.sql.SparkSession

object ETL_system_mon_3 {

  /** Normalized (user, country) pair produced by joining AD users with HR data. */
  case class AD(user_name: String, country: String)
  /** Per-user daily operation-peak bucket with its occurrence count (count kept as String). */
  case class Day_time(email: String, time_interval: String, num: String)
  /** Per-user weekly operation-peak bucket with its occurrence count. */
  case class Week_time(email: String, time_interval: String, num: String)
  /** Per-user monthly operation-peak bucket with its occurrence count. */
  case class Month_time(email: String, time_interval: String, num: String)
  /** Per-user transaction-code usage count — declared but not referenced in this file's visible code. */
  case class T_code(user_name: String, transaction_code: String, num: String)


  /**
   * Entry point: builds per-user ECC operation-peak profiles (daily / weekly /
   * monthly) from the last 180 days of audit logs and writes them into the
   * HBase table "upp:upp_user_profile" under column family "system".
   */
  def main(args: Array[String]): Unit = {
    val util = new Utils
    val sparkSession = SparkSession.builder.master("yarn").appName("ETL_system_mon_3").enableHiveSupport().getOrCreate()
    // HBase output wiring: all three result sets below are written to the same
    // table via the old-API TableOutputFormat + saveAsHadoopDataset.
    val hbase_conf = HBaseConfiguration.create()
    val tablename = "upp:upp_user_profile"
    val jobConf = new JobConf(hbase_conf)
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, tablename)


    import sparkSession.sqlContext.implicits._

    //all_country
    // Join AD users to HR employees; filterNotNull picks the first non-null of
    // (emp.country, ad.country) so the HR country wins when present.
    sparkSession.sql("select lower(ad.user_name),emp.country,ad.country from ccsd.ad_user_upp ad left join ccsd.hr_employee emp on lower(ad.user_name ) = lower(emp.itcode) where ad.user_name !='' and lower(ad.user_name) !='null' and ad.user_name is not null ").rdd.map(item=>{
      AD(item(0)+"",util.filterNotNull(item(1)+"",item(2)+"").toString)
    }).toDF().createTempView("ad")
    // Country-name mapping table lives in MySQL; pulled in over JDBC and joined
    // to translate the workday country name into the target country name.
    val df = sparkSession.read.format("jdbc").options(Map("url" -> util.url , "driver" -> util.driver, "dbtable" -> "tb_country_mapping", "user" -> util.user, "password" -> util.password)).load()
    df.createTempView("mysql_tmp")
    // NOTE(review): "traget_country" looks like a typo for "target_country" —
    // presumably it matches the actual MySQL column name; confirm before fixing.
    sparkSession.sql("select ad.user_name user_name,t2.traget_country country from ad left join mysql_tmp t2 on lower(ad.country) = lower(t2.workday_country) ").createTempView("all_country")


    //ecc天操作高峰 //ecc周操作高峰 //ecc月操作高峰
    // (ECC daily / weekly / monthly operation peaks.)
    // Base dataset: audit-log rows for known users within the last 180 days.
    // NOTE(review): date_format pattern 'YYYY-MM-dd' uses week-year 'Y', which
    // differs from calendar year 'yyyy' around New Year — likely should be
    // 'yyyy-MM-dd'; confirm against expected cutoff behavior.
    val all_df = sparkSession.sql("SELECT LOWER(egp.user_name) user_name,egp.exec_date exec_date,ad.country,egp.exec_time exec_time from ccsd.egp_audit_log egp join all_country ad on lower(ad.user_name) = lower(egp.user_name)  where egp.transaction_code is not null and egp.transaction_code!='' and lower(egp.transaction_code) != 'null' AND date_format(cast(unix_timestamp(egp.exec_date,'yyyyMMdd') as timestamp),'YYYY-MM-dd') > date_sub('" + util.getDay() + "'," + 180 + ")")
    //day
    // Count occurrences per (user, intraday interval); the "#"-joined key is
    // split back apart after the reduce. Boundary hours: 9/12/14/17/19.
    all_df.rdd.map(row => {
      (row(0)+"#"+get_day_time_interval(row(1)+"-"+row(3),row(2)+"","9:00-12:00-14:00-17:00-19:00"),1)
    }).reduceByKey(_+_).map(row=> {
      println(row._1.split("#")(0)+"======="+row._1.split("#")(1)+"=================="+row._2)
      Day_time(row._1.split("#")(0)+"",row._1.split("#")(1)+"",row._2+"")
    }).toDF().createTempView("day_time_interval_tmp")
    // Top-2 intervals per user (rn=1 joined to rn=2) so a tie between the two
    // most frequent intervals can be detected below.
    val day_time_interval_resultDF = sparkSession.sql("SELECT a.email,a.time_interval,a.num,b.num FROM (select t.email email,t.time_interval time_interval,t.num num from (select email,time_interval,num,row_number() over( partition by email order by CAST(num AS int) desc ) rn  from day_time_interval_tmp) t where t.rn =1) a left join (select t.email email,t.time_interval time_interval,t.num num from (select email,time_interval,num,row_number() over( partition by email order by  CAST(num AS int) desc ) rn  from day_time_interval_tmp) t where t.rn =2) b  on (a.email = b.email)")

    day_time_interval_resultDF.rdd.map(row=>{
      // Row key = user email; "Average" is written when the top two interval
      // counts tie (no single dominant peak).
      val put = new Put(Bytes.toBytes(row(0)+""))
      // NOTE(review): row(x)+"" renders null as "null", so the comparison is
      // null-safe; the trailing row(2)!=null check after use is redundant.
      if ( (row(2)+"") == (row(3)+"") && !("").equals(row(2))&& row(2)!= null){
        println("num1 : " + row(2) +" num2 : "+ row(3))
        put.addColumn(Bytes.toBytes("system"), Bytes.toBytes("ecc_day_operation_peak"), Bytes.toBytes("Average"))
      }
      else
        put.addColumn(Bytes.toBytes("system"), Bytes.toBytes("ecc_day_operation_peak"), Bytes.toBytes(row(1).toString))
      (new ImmutableBytesWritable, put)
    }).saveAsHadoopDataset(jobConf)

    //week
    // Same pattern keyed by weekday name. NOTE(review): only this branch
    // filters out rows with an empty exec_date; the day and month branches
    // rely on the helpers' null/empty guards instead — confirm intentional.
    all_df.rdd.map(row => {
      if ((row(1)+"").length > 0)
        (row(0).toString+"#"+get_week_time_interval(row(1)+"-"+row(3)+"",row(2)+""),1)
      else
        ("",0)}
    ).filter(_._2 != 0).reduceByKey(_+_).map(row=> {Week_time(row._1.split("#")(0),row._1.split("#")(1),row._2.toString)}).toDF()
    .createTempView("week_time_interval_tmp")

    // Top-2 weekdays per user, for the same tie-detection scheme as above.
    val week_time_interval_resultDF = sparkSession.sql("SELECT a.email,a.time_interval,a.num,b.num FROM (select t.email email,t.time_interval time_interval,t.num num from (select email,time_interval,num,row_number() over( partition by email order by CAST(num AS int) desc ) rn  from week_time_interval_tmp) t where t.rn =1) a left join (select t.email email,t.time_interval time_interval,t.num num from  (select email,time_interval,num,row_number() over( partition by email order by CAST(num AS int) desc ) rn  from week_time_interval_tmp) t where t.rn =2) b  on (a.email = b.email)")

    week_time_interval_resultDF.rdd.map(row=>{
      val put = new Put(Bytes.toBytes(row(0)+""))
      if ((row(2)+"") == (row(3)+"") && !("").equals(row(2))&& row(2)!= null ){
        println("num1 : " + row(2) +" num2 : "+ row(3))
        put.addColumn(Bytes.toBytes("system"), Bytes.toBytes("ecc_week_operation_peak"), Bytes.toBytes("Average"))
      }
      else
        put.addColumn(Bytes.toBytes("system"), Bytes.toBytes("ecc_week_operation_peak"), Bytes.toBytes(row(1).toString))
      (new ImmutableBytesWritable, put)
    }).saveAsHadoopDataset(jobConf)

    //month
    // Keyed by month phase. get_motnth_time_interval returns "" for missing
    // dates, which yields a key with no "#"-separated second part — the
    // split-length filter drops those rows.
    val month_time_interval_rdd = all_df.rdd.map(row => {
      (row(0)+"#"+get_motnth_time_interval(row(1)+"-"+row(3)+"",row(2)+""),1)
    }).reduceByKey(_+_).filter(_._1.split("#").length==2).map(row=> {Month_time(row._1.split("#")(0)+"",row._1.split("#")(1)+"",row._2+"")})
      .toDF().createTempView("month_time_interval_tmp")

    val month_time_interval_resultDF = sparkSession.sql("select a.email email,a.time_interval time_interval,a.num num1,b.num num2 from (select t.email email,t.time_interval time_interval,t.num num from (select email,time_interval,num,row_number() over( partition by email order by CAST(num AS int) desc ) rn  from month_time_interval_tmp) t where t.rn =1 ) a left join (select t.email email,t.time_interval time_interval,t.num num from (select email,time_interval,num,row_number() over( partition by email order by CAST(num AS int) desc ) rn  from month_time_interval_tmp) t where t.rn =2 ) b on (a.email = b.email)")
    month_time_interval_resultDF.rdd.map(row=>{
      val put = new Put(Bytes.toBytes(row(0)+""))
      // Note the slightly different tie check here: compares (row(2)+"")
      // against "" instead of comparing the raw row(2) as the other branches do.
      if ( (row(2)+"") == (row(3)+"") && !("").equals(row(2)+"") && row(2)!= null){
        put.addColumn(Bytes.toBytes("system"), Bytes.toBytes("ecc_month_operation_peak"), Bytes.toBytes("Average"))
      }
      else
        put.addColumn(Bytes.toBytes("system"), Bytes.toBytes("ecc_month_operation_peak"), Bytes.toBytes(row(1)+""))
      (new ImmutableBytesWritable, put)
    }).saveAsHadoopDataset(jobConf)

    sparkSession.stop()

  }

  /**
   * Maps a timestamp to a named intraday interval, evaluated in the country's
   * local timezone (resolved via Utils.get_locale — project helper).
   *
   * @param time          timestamp in "yyyyMMdd-HHmmss" form
   * @param country       country name used to resolve the timezone
   * @param peak_with_day dash-separated ascending boundary list of "H:mm"
   *                      entries, e.g. "9:00-12:00-14:00-17:00-19:00"
   * @return "Morning" / "Noon" / "Afternoon" / "Before off work" / "Overtime",
   *         or "NULL" when the time string is missing or the literal "null"
   */
  def get_day_time_interval(time :String,country:String,peak_with_day:String):String={
    if(time==null || "".equals(time) || "null".equals(time.toLowerCase)) return  "NULL"
    val sdf = new SimpleDateFormat("yyyyMMdd-HHmmss")
    val util = new Utils

    val c1 = new GregorianCalendar
    c1.setTime(sdf.parse(time))
    // Setting the timezone after setTime is fine: the calendar keeps the
    // millisecond instant and recomputes fields lazily on get().
    c1.setTimeZone(TimeZone.getTimeZone(util.get_locale(country)))

    val hour = c1.get(Calendar.HOUR_OF_DAY)
    // Parse the boundary hours once; the original re-split peak_with_day in
    // every branch (8 redundant split chains).
    val bounds = peak_with_day.split("-").map(_.split(":")(0).toInt)
    if (bounds(0) <= hour && hour < bounds(1)) "Morning"
    else if (bounds(1) <= hour && hour < bounds(2)) "Noon"
    else if (bounds(2) <= hour && hour < bounds(3)) "Afternoon"
    else if (bounds(3) <= hour && hour < bounds(4)) "Before off work"
    else "Overtime"
  }

  /**
   * Maps a timestamp ("yyyyMMdd-HHmmss") to its weekday name in the country's
   * local timezone; Saturday and Sunday collapse to "Weekend".
   * Returns "NULL" when the time string is missing or the literal "null".
   */
  def get_week_time_interval(time :String,country:String):String={
    if(time==null || "".equals(time) || "null".equals(time.toLowerCase)) return  "NULL"
    val parser = new SimpleDateFormat("yyyyMMdd-HHmmss")
    val util = new Utils
    val cal = new GregorianCalendar
    cal.setTime(parser.parse(time))
    cal.setTimeZone(TimeZone.getTimeZone(util.get_locale(country)))
    // Calendar.SUNDAY..SATURDAY are 1..7, so the fallback arm is unreachable
    // in practice but kept for exhaustiveness (original had the same fallback).
    cal.get(Calendar.DAY_OF_WEEK) match {
      case Calendar.MONDAY                     => "Monday"
      case Calendar.TUESDAY                    => "Tuesday"
      case Calendar.WEDNESDAY                  => "Wednesday"
      case Calendar.THURSDAY                   => "Thursday"
      case Calendar.FRIDAY                     => "Friday"
      case Calendar.SUNDAY | Calendar.SATURDAY => "Weekend"
      case other                               => other + ""
    }
  }

  /**
   * Buckets a date ("yyyyMMdd-HHmmss") into a month phase, using the country's
   * timezone for day-of-month / month-length resolution.
   *
   * Phases: "Month End Closing" (first 4 or last 4 days), "ROL" (5th-7th day
   * from month end), "Early Month" (day 5-10), "Mid Month" (11-20),
   * "Late Month" (rest). Returns "" for a missing date (callers drop such rows).
   *
   * NOTE(review): the original ROL branch also tested curDay < 4, which is dead
   * code — curDay <= 4 is already caught by the first branch — so behavior is
   * unchanged by dropping it.
   */
  def get_motnth_time_interval(date: String,country :String): String = {
    if(date ==null || "".equals(date) || "null".equals(date.toLowerCase)) return  ""
    val sdf = new SimpleDateFormat("yyyyMMdd-HHmmss")
    val pDate = sdf.parse(date)
    // Compute both calendar values once; the original re-derived the current
    // day of month in every branch (up to 7 helper calls per invocation).
    val lastDay = getLastMonDay(pDate, country)
    val curDay = getCurMonDay(pDate, country)
    if (curDay <= 4 || lastDay - curDay <= 4) "Month End Closing"
    else if (lastDay - curDay <= 7) "ROL"
    else if (curDay <= 10) "Early Month" // curDay > 4 guaranteed by first branch
    else if (curDay <= 20) "Mid Month"
    else "Late Month"
  }

  /**
   * Number of days in the month containing `date`, evaluated in the country's
   * local timezone (resolved via Utils.get_locale).
   */
  def getLastMonDay(date: Date,country :String ): Int = {
    val util = new Utils
    val cal = new GregorianCalendar
    cal.setTime(date)
    cal.setTimeZone(TimeZone.getTimeZone(util.get_locale(country)))
    cal.getActualMaximum(Calendar.DATE)
  }

  /**
   * Day of month (1-31) for `date`, evaluated in the country's local timezone
   * (resolved via Utils.get_locale).
   */
  def getCurMonDay(date: Date,country:String): Int = {
    val util = new Utils
    val ca = new GregorianCalendar
    // The original called setTime both before and after setTimeZone; the
    // second call was redundant — setTime only stores the millisecond instant,
    // and field values are recomputed lazily at get().
    ca.setTime(date)
    ca.setTimeZone(TimeZone.getTimeZone(util.get_locale(country)))
    ca.get(Calendar.DAY_OF_MONTH)
  }

  /** Today's date formatted as "yyyy-MM-dd" in the JVM's default timezone. */
  def getDay(): String = new SimpleDateFormat("yyyy-MM-dd").format(new Date())

}