package com.lenovo.userprofile

import java.text.SimpleDateFormat
import java.util.{Calendar, Date, GregorianCalendar, TimeZone}

import com.lenovo.function.Utils
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.sql.SparkSession

object ETL_behavior_mon_1 {
  def main(args: Array[String]): Unit = {

    // ETL driver: buckets each user's ITSM incident submissions into month
    // time intervals (Month End Closing / ROL / Early / Mid / Late Month),
    // picks the dominant bucket per user, and writes it to the HBase user
    // profile table (column family "behavior", qualifier "peak_month_user").
    // NOTE(review): `util` is never reassigned — could be a val.
    var util = new Utils
    val sparkSession = SparkSession.builder.master("yarn").appName("ETL_behavior_mon_1").enableHiveSupport().getOrCreate()

    // HBase output via the old mapred API: TableOutputFormat + JobConf,
    // consumed below by saveAsHadoopDataset.
    val hbase_conf = HBaseConfiguration.create()
    //hbase_conf.set("hbase.zookeeper.property.clientPort", "2181")
    val tablename = "upp:upp_user_profile"
    val jobConf = new JobConf(hbase_conf)
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, tablename)

   // var all_df = sparkSession.sql("select  LOWER(dw.internetemail),dw.submitdate from cctest.dw_incident dw where reported_sourcename !='Monitor' AND categorization_tier_1 !='Customer Voice' AND ( product_categorization_tier_1 =='Applications - Strategic' OR product_categorization_tier_1 =='Applications - Non Strategic China' OR product_categorization_tier_1 =='Applications - Manufactory' OR product_categorization_tier_1 =='Communication & Collaboration' OR product_categorization_tier_1 =='DC Operation' OR product_categorization_tier_1 =='Facility' OR product_categorization_tier_1 =='Network' OR product_categorization_tier_1 =='Security' OR product_categorization_tier_1 =='Server' OR product_categorization_tier_1 =='Storage' OR product_categorization_tier_1 =='Voice' OR product_categorization_tier_1 =='Desktop & Office Software' OR product_categorization_tier_1 =='Hardware Maintenance' OR product_categorization_tier_1 =='Infrasturcture Application' OR product_categorization_tier_1 =='Others' ) AND internetemail is not null AND internetemail!='' AND LOWER(internetemail)!= 'null'")
    //var all_df = sparkSession.sql("select LOWER(substring_index(t1.email,'@',1)),t1.submitdate, ad.country from (select  LOWER(dw.internetemail) email ,dw.submitdate submitdate from cctest.dw_incident dw where LOWER(internetemail) not like '%monitor%' AND reported_sourcename !='Monitor' AND categorization_tier_1 !='Customer Voice' AND ( product_categorization_tier_1 =='Applications - Strategic' OR product_categorization_tier_1 =='Applications - Non Strategic China' OR product_categorization_tier_1 =='Applications - Manufactory' OR product_categorization_tier_1 =='Communication & Collaboration' OR product_categorization_tier_1 =='DC Operation' OR product_categorization_tier_1 =='Facility' OR product_categorization_tier_1 =='Network' OR product_categorization_tier_1 =='Security' OR product_categorization_tier_1 =='Server' OR product_categorization_tier_1 =='Storage' OR product_categorization_tier_1 =='Voice' OR product_categorization_tier_1 =='Desktop & Office Software' OR product_categorization_tier_1 =='Hardware Maintenance' OR product_categorization_tier_1 =='Infrasturcture Application' OR product_categorization_tier_1 =='Others' ) AND internetemail is not null AND internetemail!='' AND LOWER(internetemail)!= 'null' ) t1  join cctest.ad_user_upp ad on LOWER(substring_index(t1.email,'@',1)) = LOWER(ad.user_name)")


    // "all_data": incidents from the last 180 days (relative to util.getDay()),
    // keyed by itcode (email local part), excluding monitor/system accounts and
    // restricted to the listed product categories; joined to AD users.
    sparkSession.sql("select LOWER(substring_index(t1.email,'@',1)) email,t1.submitdate submitdate, ad.country from (select  LOWER(dw.internetemail) email ,dw.submitdate submitdate from ccsd.itsm_dw_incident dw where LOWER(internetemail) not like '%monitor%' AND reported_sourcename !='Monitor' AND categorization_tier_1 !='Customer Voice' AND ( product_categorization_tier_1 =='Applications - Strategic' OR product_categorization_tier_1 =='Applications - Non Strategic China' OR product_categorization_tier_1 =='Applications - Manufactory' OR product_categorization_tier_1 =='Communication & Collaboration' OR product_categorization_tier_1 =='DC Operation' OR product_categorization_tier_1 =='Facility' OR product_categorization_tier_1 =='Network' OR product_categorization_tier_1 =='Security' OR product_categorization_tier_1 =='Server' OR product_categorization_tier_1 =='Storage' OR product_categorization_tier_1 =='Voice' OR product_categorization_tier_1 =='Desktop & Office Software' OR product_categorization_tier_1 =='Hardware Maintenance' OR product_categorization_tier_1 =='Infrasturcture Application' OR product_categorization_tier_1 =='Others' ) AND internetemail is not null AND internetemail!='' AND LOWER(internetemail)!= 'null'  AND date_format(dw.submitdate,'yyyy-MM-dd') > date_sub('"+util.getDay() + " ','180')) t1  join ccsd.ad_user_upp ad on LOWER(substring_index(t1.email,'@',1)) = LOWER(ad.user_name)")
      .createTempView("all_data")

    import sparkSession.sqlContext.implicits._
    // "all_country": one country per user — prefer HR country (country1) over
    // the AD country (country2) via Utils.filterNotNull.
    sparkSession.sql("select lower(ad.user_name),emp.country country1,ad.country country2 from ccsd.ad_user_upp ad left join ccsd.hr_employee emp on lower(ad.user_name) = lower(emp.itcode)").rdd.map(item =>{
      val country = util.filterNotNull(item(1)+"",item(2)+"")
      (item(0)+"",country)
    }).toDF("user_name","country").createTempView("all_country")

    // "all": incident rows enriched with the resolved country per user.
    sparkSession.sql("select t1.email email,t1.submitdate submitdate,t2.country country from all_data t1 join all_country t2 on  t1.email = t2.user_name ").createTempView("all")

    // MySQL lookup table mapping Workday country names to target country names
    // (used below to normalize the country before bucketing by timezone).
    val df = sparkSession.read.format("jdbc").options(Map("url" -> util.url , "driver" -> util.driver, "dbtable" -> "tb_country_mapping", "user" -> util.user, "password" -> util.password)).load()
    df.show(10)
    df.createTempView("mysql_tmp")

    // NOTE(review): "traget_country" looks like a typo for "target_country",
    // but it must match the actual MySQL column name — confirm before fixing.
    var all_df = sparkSession.sql("select t1.email email,t1.submitdate submitdate,t2.traget_country from all t1 join mysql_tmp t2 on lower(t1.country) = lower(t2.workday_country) ")
    all_df.show()

    // NOTE(review): duplicate implicits import — already imported above.
    import sparkSession.sqlContext.implicits._
    // Count incidents per (email, time-interval bucket). Rows whose bucket is
    // empty (unparseable date) produce a key with a single "#" segment and are
    // dropped by the split-length filter.
    val month_time_interval_rdd = all_df.rdd.map(row => {
        (row(0)+"#"+get_motnth_time_interval(row(1)+"",row(2)+""),1)
        }).reduceByKey(_+_).filter(_._1.split("#").length==2).map(row=> {(row._1.split("#")(0)+"",row._1.split("#")(1)+"",row._2+"")})
     .toDF("email","time_interval","num").createTempView("month_time_interval_tmp")


    // Per email: top bucket (rn=1) left-joined with the runner-up (rn=2) so
    // ties between the two best buckets can be detected below.
    val month_time_interval_resultDF = sparkSession.sql("select a.email email,a.time_interval time_interval,a.num num1,b.num num2 from (select t.email email,t.time_interval time_interval,t.num num from (select email,time_interval,num,row_number() over( partition by email order by CAST(num AS int) desc ) rn  from month_time_interval_tmp) t where t.rn =1 ) a left join (select t.email email,t.time_interval time_interval,t.num num from (select email,time_interval,num,row_number() over( partition by email order by CAST(num AS int) desc ) rn  from month_time_interval_tmp) t where t.rn =2 ) b on (a.email = b.email)")

    // Write one Put per user: row key = email; if the top two bucket counts
    // tie, the user is labeled "Average", otherwise the winning bucket name.
    month_time_interval_resultDF.rdd.map(row=>{
      val put = new Put(Bytes.toBytes(null2Str(row(0))))
      if ( (row(2)+"") == (row(3)+"") && !("").equals(row(2)+"") && row(2)!= null){
     //   println("num1 : " + row(2) +" num2 : "+ row(3))
        put.addColumn(Bytes.toBytes("behavior"), Bytes.toBytes("peak_month_user"), Bytes.toBytes("Average"))
      }
      else
        put.addColumn(Bytes.toBytes("behavior"), Bytes.toBytes("peak_month_user"), Bytes.toBytes(row(1)+""))
      (new ImmutableBytesWritable, put)
    }).saveAsHadoopDataset(jobConf)

    sparkSession.stop()
  }
  /** Renders any value as a String, mapping `null` to the empty string.
    *
    * Used to build HBase row keys, where a literal "null" key or an NPE from
    * `Bytes.toBytes(null)` must be avoided.
    *
    * @param data any value, possibly null
    * @return "" when `data` is null, otherwise `data.toString`
    */
  def null2Str(data: Any): String =
    if (data == null) "" else data.toString
  /** Returns the first argument when it is usable, otherwise the second.
    *
    * Fix: the original only rejected the empty string, so a null first
    * argument was returned as-is — propagating null from a method whose name
    * promises the opposite. Now both null and "" fall through to `second`.
    * (Parameter name `fister` is a typo but is kept: renaming would break
    * named-argument callers.)
    *
    * @param fister preferred value, may be null or empty
    * @param second fallback value
    * @return `fister` when non-null and non-empty, else `second`
    */
  def filterNotNull(fister: String, second: String): String =
    if (fister != null && fister.nonEmpty) fister else second

  /** Buckets a submit timestamp into a month time-interval label, evaluated in
    * the given country's timezone.
    *
    * Fixes: removed the three debug `println`s that ran for every RDD row on
    * the executors; `getCurMonDay`/`getLastMonDay` are now computed once
    * instead of up to six times per call; dropped the dead condition
    * `getCurMonDay(...) < 4` in the "ROL" branch (unreachable, because the
    * first branch already returns for every day <= 4).
    *
    * @param date    timestamp string, "yyyy-MM-dd HH:mm:ss"; null/""/"null"
    *                yields "" (the caller drops empty buckets)
    * @param country country name, resolved to a timezone via Utils.get_locale
    * @return one of "Month End Closing", "ROL", "Early Month", "Mid Month",
    *         "Late Month", or "" for a blank date
    */
  def get_motnth_time_interval(date: String, country: String): String = {
    if (date == null || date.isEmpty || "null".equalsIgnoreCase(date)) ""
    else {
      val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
      val pDate = sdf.parse(date)
      val curDay = getCurMonDay(pDate, country)     // day-of-month of the submit date
      val lastDay = getLastMonDay(pDate, country)   // number of days in that month

      if (curDay <= 4 || lastDay - curDay <= 4) "Month End Closing"
      else if (lastDay - curDay <= 7) "ROL"
      else if (curDay <= 10) "Early Month"          // curDay > 4 guaranteed here
      else if (curDay <= 20) "Mid Month"
      else "Late Month"
    }
  }

  /** Number of days in the month containing `date`, evaluated in the
    * country's timezone (resolved via Utils.get_locale).
    *
    * Cleanup: timezone is passed to the GregorianCalendar constructor instead
    * of being set after `setTime`; dropped the redundant `return` and the
    * commented-out dead line.
    *
    * @param date    the instant whose month is inspected
    * @param country country name understood by Utils.get_locale
    * @return last day-of-month (28..31) of that month in that timezone
    */
  def getLastMonDay(date: Date, country: String): Int = {
    val util = new Utils
    val ca = new GregorianCalendar(TimeZone.getTimeZone(util.get_locale(country)))
    ca.setTime(date)
    ca.getActualMaximum(Calendar.DATE)
  }

  /** Day-of-month (1..31) of `date`, evaluated in the country's timezone.
    *
    * Fix: the original called `ca.setTime(date)` twice (before and after
    * setting the timezone) — the duplicate is removed; timezone now goes
    * through the GregorianCalendar constructor and the `return` is dropped.
    *
    * @param date    the instant to inspect
    * @param country country name understood by Utils.get_locale
    * @return the calendar day of month in that timezone
    */
  def getCurMonDay(date: Date, country: String): Int = {
    val util = new Utils
    val ca = new GregorianCalendar(TimeZone.getTimeZone(util.get_locale(country)))
    ca.setTime(date)
    ca.get(Calendar.DAY_OF_MONTH)
  }

}
