package com.lenovo.userprofile

import com.lenovo.jdbc.DBHelper
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.sql.{Row, SparkSession}

object ETL_nature_mon_3 {

  /** A user's itcode paired with the resolved manager itcode (`mitcode`). */
  case class Person(itcode: String, mitcode: String)
  /**
   * Entry point: builds a user -> direct-manager mapping from Hive, then runs
   * three Spark jobs that walk each user's management chain and write three
   * organisation labels (bu, segment, pcg_dcg) to the HBase table
   * "upp:upp_user_profile" under the "nature" column family.
   *
   * NOTE(review): the three write sections are copies of one another; only the
   * label map (loaded via DBHelper) and the HBase column qualifier differ.
   */
  def main(args: Array[String]): Unit = {

    val sparkSession = SparkSession.builder.master("yarn").appName("ETL_nature_mon_3").enableHiveSupport().getOrCreate()
    val hbase_conf = HBaseConfiguration.create()
    val tablename = "upp:upp_user_profile"
    // Classic mapred JobConf so the pair RDDs below can use saveAsHadoopDataset.
    val jobConf = new JobConf(hbase_conf)
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, tablename)

    //val all_df = sparkSession.sql("select t1.itcode,t1.mitcode from (select lower(hr1.itcode) itcode,lower(hr2.itcode) mitcode from ccsd.hr_employee hr1 LEFT JOIN ccsd.hr_employee hr2 on hr1.manager_id =hr2.employee_id where lower(hr1.currently_active) ='yes' and hr1.itcode is not null AND hr1.itcode !='' AND LOWER(hr1.itcode)!='null' )t1 join ccsd.ad_user_upp ad on t1.itcode = LOWER(ad.user_name)")

    // Self-join hr_employee to pair each employee itcode with the manager's
    // itcode, then RIGHT JOIN onto the AD user list so every valid AD user is
    // kept even without an HR match (t1.mitcode is NULL in that case).
    val all_user_df = sparkSession.sql("select lower(ad.user_name) itcode,t1.mitcode,ad.manager from (select lower(hr1.itcode) itcode,lower(hr2.itcode) mitcode from ccsd.hr_employee hr1 LEFT JOIN ccsd.hr_employee hr2 on hr1.manager_id =hr2.employee_id where hr1.itcode is not null AND hr1.itcode !=''  AND LOWER(hr1.itcode)!='null' )t1 right join ccsd.ad_user_upp ad on t1.itcode = LOWER(ad.user_name) where ad.user_name is not null AND ad.user_name !=''AND LOWER(ad.user_name) != 'null'")

    import sparkSession.sqlContext.implicits._
    // Prefer the HR-derived manager (column 1); fall back to the AD "manager"
    // field (column 2) when the former is empty or "null".  item(i)+"" renders
    // a SQL NULL as the string "null", which take_not_null treats as absent.
    val all_df = all_user_df.rdd.map(item=>{
      val mitcode = take_not_null(item(1)+"",item(2)+"")
        Person( item(0)+"", mitcode)
    }).toDF("itcode","mitcode")

    all_df.show()

    //var all_user = mutable.Map.empty[String, String]
    // Driver-side itcode -> manager-itcode lookup; captured by the job
    // closures below.  Built by collecting "itcode#mitcode" strings.
    // NOTE(review): after the map above, mitcode is a String and never a real
    // null, so the filter(_(1) != null) calls likely filter nothing — confirm.
    var all_user:Map[String,String] = Map()
    //all_user:add
    all_df.rdd.filter(_(1) != null).map(item =>{
      (item(0)+"" +"#"+item(1)+"")
    }).collect().toList.foreach(item =>{
      all_user += (item.split("#")(0)-> item.split("#")(1))
    })

    // Business Unit (BU) labels: manager itcode -> BU name.
    // NOTE(review): DBHelper entries are parsed as "key#value" strings —
    // confirm the exact format against DBHelper.getBU.
    var bu_label:Map[String,String] = Map()
    DBHelper.getBU.toArray().foreach(item =>{
      bu_label += (item.toString.split("#")(0)-> item.toString.split("#")(1))
    })
    val all_mag_BU = bu_label.keys.toList

    // Job 1: walk each user's manager chain up to a BU-labelled manager and
    // write nature:bu ("NULL" when the chain ends unlabelled).
    // NOTE(review): the println runs inside an executor-side closure, so its
    // output lands in executor logs, not the driver console.
    all_df.rdd.filter(_(1) != null).map(item=>{
      println("0 : " + item(0)+"" +"1:"+ item(1))
      val user:List[String] = List(item(0).toString,item(1).toString)
      val last_mag_id = factorial(user,all_user,all_mag_BU)(1)
      //WRITE_BU
      val put = new Put(Bytes.toBytes(item(0)+""))//row key: the user's itcode

      if (bu_label.contains(last_mag_id)) {
        put.addColumn(Bytes.toBytes("nature"), Bytes.toBytes("bu"), Bytes.toBytes(bu_label(last_mag_id)))//label of the chain-top manager
      }else{
        put.addColumn(Bytes.toBytes("nature"), Bytes.toBytes("bu"), Bytes.toBytes("NULL"))//no labelled manager found
      }
      (new ImmutableBytesWritable, put)
    }).saveAsHadoopDataset(jobConf)


    // Business Segment labels: manager itcode -> segment name.
    var Segment_label:Map[String,String] = Map()
    DBHelper.getSegment.toArray().foreach(item =>{
      Segment_label += (item.toString.split("#")(0)-> item.toString.split("#")(1))
    })
    val all_mag_Segment = Segment_label.keys.toList

    // Job 2: same chain walk, writing nature:segment.
    all_df.rdd.filter(_(1) != null).map(item=>{
      println("0 : " + item(0)+"" +"1:"+ item(1))
      val user:List[String] = List(item(0).toString,item(1).toString)
      val last_mag_id = factorial(user,all_user,all_mag_Segment)(1)
      //WRITE_BU
      val put = new Put(Bytes.toBytes(item(0)+""))//row key: the user's itcode

      if (Segment_label.contains(last_mag_id)) {
        put.addColumn(Bytes.toBytes("nature"), Bytes.toBytes("segment"), Bytes.toBytes(Segment_label(last_mag_id)))//label of the chain-top manager
      }else{
        put.addColumn(Bytes.toBytes("nature"), Bytes.toBytes("segment"), Bytes.toBytes("NULL"))//no labelled manager found
      }
      (new ImmutableBytesWritable, put)
    }).saveAsHadoopDataset(jobConf)

    // Business group (PCG/DCG) labels: manager itcode -> group name.
    var pcg_dcg_label:Map[String,String] = Map()
    DBHelper.get_Pcg_dcg().toArray().foreach(item =>{
      pcg_dcg_label += (item.toString.split("#")(0)-> item.toString.split("#")(1))
    })
    val all_mag_pcg_dcg = pcg_dcg_label.keys.toList

    // Job 3: same chain walk, writing nature:pcg_dcg.
    all_df.rdd.filter(_(1) != null).map(item=>{
      println("0 : " + item(0)+"" +"1:"+ item(1))
      val user:List[String] = List(item(0).toString,item(1).toString)
      val last_mag_id = factorial(user,all_user,all_mag_pcg_dcg)(1)
      //WRITE_BU
      val put = new Put(Bytes.toBytes(item(0)+""))//row key: the user's itcode

      if (pcg_dcg_label.contains(last_mag_id)) {
        put.addColumn(Bytes.toBytes("nature"), Bytes.toBytes("pcg_dcg"), Bytes.toBytes(pcg_dcg_label(last_mag_id)))//label of the chain-top manager
      }else{
        put.addColumn(Bytes.toBytes("nature"), Bytes.toBytes("pcg_dcg"), Bytes.toBytes("NULL"))//no labelled manager found
      }
      (new ImmutableBytesWritable, put)
    }).saveAsHadoopDataset(jobConf)

    sparkSession.stop()

  }
  /**
   * Walks a user's management chain until it reaches a manager listed in
   * `all` (the labelled managers), returning the pair as a two-element list.
   *
   * @param one     two-element list: (user itcode, direct manager itcode)
   * @param all_map itcode -> manager-itcode lookup for the whole company
   * @param all     itcodes that carry a label; the walk stops on the first hit
   * @return `List(itcode, chainTopItcode)`; when the user is themselves in
   *         `all` (or the manager is the hard-coded root "yangyq"), the user's
   *         own itcode is returned in both positions.
   */
  def factorial(one: List[String], all_map: Map[String, String], all: List[String]): List[String] =
    climb(one, all_map, all, Set.empty)

  /**
   * Tail-recursive chain walk.  The `seen` set guards against cyclic manager
   * data: on a cycle the original unguarded recursion never terminated; we
   * now stop and return the current pair instead.
   */
  @scala.annotation.tailrec
  private def climb(one: List[String], all_map: Map[String, String], all: List[String], seen: Set[String]): List[String] = {
    if (all.contains(one(1))) one
    else if (all.contains(one(0)) || one(1) == "yangyq") List(one(0), one(0))
    else if (all_map.contains(one.last) && !seen.contains(one.last))
      // Step one level up the chain, remembering where we have been.
      climb(List(one.head, all_map(one.last)), all_map, all, seen + one.last)
    else List(one.head, one.last)
  }
  /**
   * Returns `col1` unless it is missing — null, empty, or the literal string
   * "null" (any case, as produced by `row(i)+""` on a SQL NULL) — in which
   * case `col2` is returned as the fallback.
   *
   * Fixes a latent NPE in the original: `col1.toLowerCase` was called before
   * any null check, so a null `col1` crashed instead of falling back.
   */
  def take_not_null(col1: String, col2: String): String =
    if (col1 == null || col1.isEmpty || col1.equalsIgnoreCase("null")) col2
    else col1
}
