package com.lenovo.userprofile

import java.text.SimpleDateFormat
import java.util.Date
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.sql.SparkSession

/**
 * Daily ETL: flags in HBase which users still have a valid account in each
 * backend system (ECC, CIP, CSP, CGP, SRM-EBP, SRM-SUS, PP).
 *
 * For every source system it selects user names whose account validity end
 * date (`gltgb` / `expire_date`, as yyyyMMdd bigint) is later than today,
 * restricted to users present in `ccsd.ad_user_upp`, and writes a
 * `system:<X>_system_logo = "Y"` cell into the HBase table
 * `upp:upp_user_profile`, row-keyed by the lower-cased user name.
 */
object ETL_system_mon_1 {

  /** HBase column family that holds the per-system logon flags. */
  private val ColumnFamily = "system"

  /**
   * Builds the validity query for the SAP-style `*_usr02` tables, which all
   * share the same layout (`bname` = user name, `gltgb` = valid-to date).
   *
   * @param table      fully qualified Hive table, e.g. "ccsd.egp301_usr02"
   * @param nowDateNum today as a yyyyMMdd number; only rows with a later
   *                   valid-to date are kept
   * @return the SQL text (nowDateNum is numeric, so the concatenation cannot
   *         inject; table names come from the hard-coded list below)
   */
  private def usr02Query(table: String, nowDateNum: Long): String =
    "select LOWER(t.bname) from (select trim(lower(egp.bname)) bname," +
      "cast(egp.gltgb as bigint) date_num from " + table +
      " egp join ccsd.ad_user_upp ad on lower(ad.user_name) = trim(lower(egp.bname))" +
      " where egp.bname is not null AND egp.bname!='' AND Lower(egp.bname) !='null') t" +
      " where t.date_num  is not null  AND Lower(t.date_num) !='null' AND t.date_num >" + nowDateNum

  /**
   * Validity query for the PP system, whose account table uses `itcode` for
   * the user name and a Unix-epoch `expire_date` instead of a yyyyMMdd column.
   */
  private def ppQuery(nowDateNum: Long): String =
    "select LOWER(t.itcode) from (select trim(lower(egp.itcode)) itcode," +
      "cast(from_unixtime(cast(expire_date AS bigint),'yMd') as bigint) date_num" +
      " from ccsd.prdmdmn_pt_user_account egp join ccsd.ad_user_upp ad" +
      " on lower(ad.user_name) = trim(lower(egp.itcode))" +
      " where egp.itcode is not null AND egp.itcode!='' AND Lower(egp.itcode) !='null') t" +
      " where t.date_num  is not null  AND Lower(t.date_num) !='null' AND t.date_num >" + nowDateNum

  /**
   * Runs `sql`, then writes one HBase cell per returned user name:
   * rowkey = user name, column = system:`qualifier`, value = "Y".
   *
   * @param spark     active session (Hive-enabled)
   * @param jobConf   pre-configured HBase TableOutputFormat job conf
   * @param sql       query returning a single lower-cased user-name column
   * @param qualifier HBase column qualifier, e.g. "ECC_system_logo"
   */
  private def writeSystemFlag(spark: SparkSession, jobConf: JobConf,
                              sql: String, qualifier: String): Unit = {
    spark.sql(sql).rdd.map { row =>
      val put = new Put(Bytes.toBytes(row(0).toString))
      put.addColumn(Bytes.toBytes(ColumnFamily), Bytes.toBytes(qualifier), Bytes.toBytes("Y"))
      (new ImmutableBytesWritable, put)
    }.saveAsHadoopDataset(jobConf)
  }

  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder
      .master("yarn")
      .appName("ETL_system_mon_1")
      .enableHiveSupport()
      .getOrCreate()

    // HBase sink configuration: all flags land in the same profile table.
    val hbaseConf = HBaseConfiguration.create()
    val tablename = "upp:upp_user_profile"
    val jobConf = new JobConf(hbaseConf)
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, tablename)

    // Today as a yyyyMMdd number, compared against the numeric valid-to dates.
    val nowDateNum = new SimpleDateFormat("yyyyMMdd").format(new Date()).toLong
    println(nowDateNum)

    // All SAP-style systems share the *_usr02 table layout; only the source
    // table and the target HBase qualifier differ.
    val usr02Systems = Seq(
      "ccsd.egp301_usr02" -> "ECC_system_logo",
      "ccsd.cip301_usr02" -> "CIP_system_logo",
      "ccsd.csp301_usr02" -> "CSP_system_logo",
      "ccsd.cgp301_usr02" -> "CGP_system_logo",
      "ccsd.mgp301_usr02" -> "SRM_EBP_system_logo",
      "ccsd.rgp301_usr02" -> "SRM_SUS_system_logo"
    )

    usr02Systems.foreach { case (table, qualifier) =>
      writeSystemFlag(sparkSession, jobConf, usr02Query(table, nowDateNum), qualifier)
    }

    // PP uses a different account table/schema, hence its own query builder.
    writeSystemFlag(sparkSession, jobConf, ppQuery(nowDateNum), "PP_system_logo")

    sparkSession.stop()
  }
}
