package com.lenovo.userprofile


import com.lenovo.function.Utils
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.sql.SparkSession

object ETL_system_mon_5 {
  /** One (user, transaction_code, occurrence-count) row for the per-user ranking temp view. */
  case class T_code(user_name: String, transaction_code: String, num: String)

  def main(args: Array[String]): Unit = {
    val util = new Utils
    val sparkSession = SparkSession.builder
      .master("yarn")
      .appName("ETL_system_mon_5")
      .enableHiveSupport()
      .getOrCreate()

    // HBase sink configuration: both pipelines write into the same profile table.
    val hbaseConf = HBaseConfiguration.create()
    val tablename = "upp:upp_user_profile"
    val jobConf = new JobConf(hbaseConf)
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, tablename)

    // The ECC and GTS pipelines were copy-paste identical except for the Hive
    // source table, the temp-view name, and the HBase column qualifier — run
    // both through a single helper.
    writeTopTransactionCodes(sparkSession, util, jobConf,
      sourceTable = "ccsd.egp_audit_log", tempView = "ecc_t_code_tmp", qualifier = "ecc_key_t_code")
    writeTopTransactionCodes(sparkSession, util, jobConf,
      sourceTable = "ccsd.ggp_audit_log", tempView = "gts_t_code_tmp", qualifier = "gts_key_t_code")

    sparkSession.stop()
  }

  /**
   * Counts transaction codes per user over the last 180 days of `sourceTable`,
   * keeps each user's five most frequent codes, and writes them (comma-joined)
   * to the `system:<qualifier>` column of the HBase table configured in `jobConf`.
   *
   * @param sourceTable Hive audit-log table to read (joined against ccsd.ad_user_upp)
   * @param tempView    name for the intermediate Spark temp view (must be unique per call)
   * @param qualifier   HBase column qualifier inside the "system" family
   */
  private def writeTopTransactionCodes(sparkSession: SparkSession,
                                       util: Utils,
                                       jobConf: JobConf,
                                       sourceTable: String,
                                       tempView: String,
                                       qualifier: String): Unit = {
    import sparkSession.sqlContext.implicits._

    // BUG FIX: the original pattern 'YYYY-MM-dd' uses the week-based year
    // (capital Y), which yields the wrong year around Dec/Jan boundaries and
    // corrupts the 180-day window; 'yyyy-MM-dd' is the calendar year.
    val auditLogs = sparkSession.sql(
      s"""SELECT lower(egp.user_name), egp.transaction_code
         |FROM $sourceTable egp
         |JOIN ccsd.ad_user_upp ad ON lower(ad.user_name) = lower(egp.user_name)
         |WHERE egp.transaction_code IS NOT NULL
         |  AND egp.transaction_code != ''
         |  AND lower(egp.transaction_code) != 'null'
         |  AND egp.transaction_code != 'SESSION_MANAGER'
         |  AND date_format(cast(unix_timestamp(egp.exec_date,'yyyyMMdd') as timestamp),'yyyy-MM-dd') > date_sub('${util.getDay()}', 180)
         |""".stripMargin)

    // Count occurrences of each (user, code) pair, then expose them as a view.
    auditLogs.rdd
      .map(row => (s"${row(0)}#${row(1)}", 1))
      .reduceByKey(_ + _)
      .map { case (key, count) =>
        val parts = key.split("#") // split once instead of twice per record
        T_code(parts(0), parts(1), count.toString)
      }
      .toDF()
      .createTempView(tempView)

    // Rank codes per user by count (num is stored as a string, so cast) and
    // keep only the top five per user.
    val topCodes = sparkSession.sql(
      s"""select t.user_name user_name, t.transaction_code transaction_code, t.num num
         |from (select user_name, transaction_code, num,
         |             row_number() over (partition by user_name order by CAST(num AS int) desc) rn
         |      from $tempView) t
         |where t.rn <= 5""".stripMargin)

    // Join each user's codes into one comma-separated value and emit a single
    // HBase Put per user, keyed by the (lowercased) user name.
    topCodes.rdd
      .map(row => (row(0) + "", row(1) + ""))
      .reduceByKey(_ + "," + _)
      .filter { case (userName, _) => userName.nonEmpty }
      .map { case (userName, codes) =>
        val put = new Put(Bytes.toBytes(userName))
        put.addColumn(Bytes.toBytes("system"), Bytes.toBytes(qualifier), Bytes.toBytes(codes))
        (new ImmutableBytesWritable, put)
      }
      .saveAsHadoopDataset(jobConf)
  }
}
