package com.lenovo.ukr

import java.util.Properties
import java.sql.DriverManager

import com.lenovo.jdbc.DBHelper
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Result
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred. TableOutputFormat
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.tree.RandomForest
import org.apache.spark.mllib.tree.model.RandomForestModel
import org.apache.spark.sql.SparkSession

object spark_ml_l0_model {

  /** One labelled training example: `target` is the binary label produced by
    * [[level2num]] (1 = the user reads L0 documents, 0 = does not); the other
    * fields are the integer-encoded profile features. */
  case class Person(target: Int, band: Int, work_years: Int, country: Int, urgency: Int, SR: Int, QI: Int, complaint: Int, compliment: Int, ecc_freq: Int, ti_dis_1: Int, ti_dis_2: Int, ti_dis_3: Int, ti_dis_4: Int)

  /**
   * Trains a RandomForest classifier predicting whether a user reads L0
   * documents, from profile features stored in the HBase table
   * `upp:upp_user_profile`, then saves the model to HDFS.
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder.master("yarn").appName("spark_ml_l0_model").enableHiveSupport().getOrCreate()
    val sc = sparkSession.sparkContext

    // HBase scan configuration for the user-profile table.
    // NOTE(review): the original also registered TableOutputFormat here, but the
    // job never writes back to HBase, so that dead configuration was dropped.
    val hbase_conf = HBaseConfiguration.create()
    val tablename = "upp:upp_user_profile"
    val jobConf = new JobConf(hbase_conf)
    jobConf.set(TableInputFormat.INPUT_TABLE, tablename)

    // Full table scan; cached because it is traversed several times below.
    val hbase_date_rdd = sc.newAPIHadoopRDD(jobConf, classOf[TableInputFormat], classOf[ImmutableBytesWritable], classOf[Result]).cache()
    println("*****************1*****************")
    println(hbase_date_rdd.count()) // total number of profile rows

    // (itcode, binary label) for every row, derived from the L0 reading-times level.
    val labeled_rdd = hbase_date_rdd.map { case (_, result) =>
      val itcode = Bytes.toString(result.getRow)
      val readTimes = Bytes.toString(result.getValue("l0_document_property".getBytes, "reading_times".getBytes))
      (itcode, level2num(readTimes))
    }

    // Balanced training sample: at most 2500 users of each class, keyed by itcode.
    var train_user: Map[String, String] = Map()
    labeled_rdd.filter(_._2 == 0).collect().take(2500).foreach { case (itcode, label) =>
      println("*****************3*****************")
      println(s"$itcode:$label")
      train_user += (itcode -> label.toString)
    }
    labeled_rdd.filter(_._2 == 1).collect().take(2500).foreach { case (itcode, label) =>
      train_user += (itcode -> label.toString)
    }
    println(train_user.keys.size)

    import sparkSession.sqlContext.implicits._
    // Encode every profile row as [itcode, feature1 .. feature13], keep only the
    // sampled users, then attach the label to build the training DataFrame.
    val all_test_user_df = hbase_date_rdd.map { case (_, result) =>
      // Reads one cell as a String; may be null when the cell is absent.
      def cell(family: String, qualifier: String): String =
        Bytes.toString(result.getValue(family.getBytes, qualifier.getBytes))

      val itcode = Bytes.toString(result.getRow)
      val (ti1, ti2, ti3, ti4) = parse_ticket_distribution(cell("behavior", "ticket_distribution"))
      List(
        itcode,
        convert_band(cell("nature", "band")),
        convert_work_years(cell("nature", "work_years")),
        convert_country(cell("nature", "country")),
        convert_urgency(cell("attitude", "urgency")),
        convert_sr(cell("behavior", "ticket_sr")),
        convert_qi(cell("behavior", "ticket_qi")),
        convert_complaint(cell("attitude", "complaint")),
        convert_compliment(cell("attitude", "praise")),
        cover_ecc_freq(cell("system", "ECC_freq")),
        cover_ticket_distribution(ti1),
        cover_ticket_distribution(ti2),
        cover_ticket_distribution(ti3),
        cover_ticket_distribution(ti4)
      )
    }.filter(item => train_user.contains(item(0)) && !"".equals(item(0)))
      .map(item => Person(train_user(item(0)).toInt, item(1).toInt, item(2).toInt, item(3).toInt,
        item(4).toInt, item(5).toInt, item(6).toInt, item(7).toInt, item(8).toInt, item(9).toInt,
        item(10).toInt, item(11).toInt, item(12).toInt, item(13).toInt))
      .toDF()

    all_test_user_df.show()

    // Column indices of the feature columns (everything except "target").
    val featInd = all_test_user_df.columns.diff(List("target")).map(all_test_user_df.columns.indexOf(_))
    val targetInd = all_test_user_df.columns.indexOf("target")
    val data = all_test_user_df.rdd.map(r => LabeledPoint(
      r.getInt(targetInd),
      Vectors.dense(featInd.map(r.getInt(_).toDouble))
    ))
    val splits = data.randomSplit(Array(0.7, 0.3))
    val (trainingData, testData) = (splits(0), splits(1))

    // RandomForest hyper-parameters. Empty categoricalFeaturesInfo means all
    // features are treated as continuous.
    // NOTE(review): labels are only ever 0/1 (see level2num), so numClasses
    // could be 2; kept at 3 to reproduce the original model exactly.
    val numClasses = 3
    val categoricalFeaturesInfo = Map[Int, Int]()
    val numTrees = 400 // Use more in practice.
    val featureSubsetStrategy = "auto" // Let the algorithm choose.
    val impurity = "gini"
    val maxDepth = 4
    val maxBins = 100

    val model = RandomForest.trainClassifier(trainingData, numClasses, categoricalFeaturesInfo,
      numTrees, featureSubsetStrategy, impurity, maxDepth, maxBins)

    // Evaluate the model on the held-out split and report the error rate.
    val labelAndPreds = testData.map { point =>
      val prediction = model.predict(point.features)
      (point.label, prediction)
    }
    val testErr = labelAndPreds.filter(r => r._1 != r._2).count.toDouble / testData.count()
    println("Test Error = " + testErr)
    println("Learned classification forest model:\n" + model.toDebugString)

    // Persist the model, then reload it as a sanity check of the saved artifact.
    model.save(sc, "/user/p66_g98/test/l0_Model")
    val sameModel = RandomForestModel.load(sc, "/user/p66_g98/test/l0_Model")
    sparkSession.stop()
  }

  /**
   * Splits a "Category#Level,Category#Level,..." string into the levels of the
   * four tracked ticket categories, in the order
   * (Business_Application, Communication, Network_Voice, Desktop_Software).
   * Missing categories — or a null input — yield "".
   *
   * BUG FIX: the original inline version assigned the Desktop_Software level to
   * the Network_Voice slot, so the fourth feature was always empty; it also
   * threw a NullPointerException when the cell was absent.
   */
  private def parse_ticket_distribution(distribution: String): (String, String, String, String) = {
    var business, communication, network, desktop = ""
    if (distribution != null) {
      distribution.split(",").foreach { entry =>
        val parts = entry.split("#")
        if (parts.length == 2) {
          parts(0) match {
            case "Business_Application" => business = parts(1)
            case "Communication"        => communication = parts(1)
            case "Network_Voice"        => network = parts(1)
            case "Desktop_Software"     => desktop = parts(1)
            case _                      => // unknown category: ignored
          }
        }
      }
    }
    (business, communication, network, desktop)
  }

  /** Encodes a band / job-level string as a numeric rank. Known values map via
    * the table below; null/empty/"null" map to "0"; anything else to "20". */
  def convert_band(band: String): String = {
    val all_band = Map(
      "1" -> "1", "2" -> "2", "3" -> "3", "4" -> "4", "5" -> "5",
      "6" -> "6", "7" -> "7", "8" -> "8", "9" -> "9", "10" -> "10",
      "10 P" -> "10",
      "ED" -> "11", "VP" -> "12", "SVP" -> "13", "EVP" -> "14", "CEO" -> "15",
      // Odd values observed in the source data; mappings kept from the original.
      "Slovakia" -> "1",
      "China Contractor (inactive)" -> "0",
      "Brazil Corporate Administration (inactive)" -> "0",
      "Contract" -> "0"
    )
    if (band == null || band.isEmpty || "null".equals(band.toLowerCase)) "0"
    else all_band.getOrElse(band, "20")
  }

  /** Encodes working years: "5+" -> "11", missing -> "0", otherwise the numeric
    * value doubled and truncated to an Int (e.g. "2.5" -> "5"). */
  def convert_work_years(working_years: String): String = {
    if (working_years == "5+") "11"
    else if (working_years == null || working_years.isEmpty || "null".equals(working_years.toLowerCase)) "0"
    else (working_years.toDouble * 2).toInt.toString
  }

  /** Encodes recency of the user's last urgent ticket (3 = most recent). */
  def convert_urgency(urgency: String): String = urgency match {
    case "In Last Three Month" => "3"
    case "In Last Six Month"   => "2"
    case "In Last Year"        => "1"
    case _                     => "0"
  }

  /** Encodes recency of the user's last complaint (3 = most recent).
    * NOTE(review): the parameter was named `compliment` in the original; the
    * name is kept to preserve the signature for named-argument callers. */
  def convert_complaint(compliment: String): String = compliment match {
    case "In Last Three Month" => "3"
    case "In Last Six Month"   => "2"
    case "In Last Year"        => "1"
    case _                     => "0"
  }

  /** Encodes recency of the user's last compliment/praise (3 = most recent). */
  def convert_compliment(compliment: String): String = compliment match {
    case "In Last Three Month" => "3"
    case "In Last Six Month"   => "2"
    case "In Last Year"        => "1"
    case _                     => "0"
  }

  /** SR flag: "Y" -> "1", anything else (including null) -> "0". */
  def convert_sr(sr: String): String = if (sr == "Y") "1" else "0"

  /** QI flag: "Y" -> "1", anything else (including null) -> "0". */
  def convert_qi(qi: String): String = if (qi == "Y") "1" else "0"

  /** Binary encoding of the L0 reading-times level; same mapping as [[level2num]]. */
  def convert_L0_readtimes(times: String): String = level2num(times).toString

  /** Placeholder kept for interface compatibility; always returns "0". */
  def convert_ticket_distribution(distribution: String): String = "0"

  /** Encodes ECC usage frequency on a 0-4 scale (4 = High, 0 = unknown). */
  def cover_ecc_freq(freq: String): String = freq match {
    case "High"     => "4"
    case "Medium"   => "3"
    case "Low"      => "2"
    case "Inactive" => "1"
    case _          => "0"
  }

  /** Encodes a ticket-distribution level on a 0-3 scale (3 = High, 0 = unknown). */
  def cover_ticket_distribution(freq: String): String = freq match {
    case "High"   => "3"
    case "Medium" => "2"
    case "Low"    => "1"
    case _        => "0"
  }

  /** Encodes a country name or ISO-style code as a numeric id.
    * Known values map via the table below; null/empty/"null" map to "0";
    * any other value maps to "130". */
  def convert_country(country: String): String = {
    val all_country = Map(
      "Slovakia" -> "1", "UnitedKingdom" -> "2", "Denmark" -> "3", "CN" -> "4",
      "MY" -> "5", "Japan" -> "6", "US" -> "7", "Slovenia" -> "8",
      "Spain" -> "9", "Indonesia" -> "10", "Austria" -> "11", "JP" -> "12",
      "GB" -> "13", "Singapore" -> "14", "UnitedStatesofAmerica" -> "15", "India" -> "16",
      "Brazil" -> "17", "BR" -> "18", "China" -> "19", "Mexico" -> "20",
      "IN" -> "21", "Philippines" -> "22", "UnitedArabEmirates" -> "23", "Argentina" -> "24",
      "MX" -> "25", "Canada" -> "26", "Taiwan" -> "27", "France" -> "28",
      "SouthAfrica" -> "29", "SaudiArabia" -> "30", "TW" -> "31", "Colombia" -> "32",
      "Malaysia" -> "33", "RussianFederation" -> "34", "Peru" -> "35", "HK" -> "36",
      "Germany" -> "37", "Switzerland" -> "38", "KZ" -> "39", "Australia" -> "40",
      "Israel" -> "41", "DE" -> "42", "Romania" -> "43", "Norway" -> "44",
      "Chile" -> "45", "CO" -> "46", "PL" -> "47", "Sweden" -> "48",
      "RU" -> "49", "UA" -> "50", "Finland" -> "51", "Italy" -> "52",
      "RO" -> "53", "Ireland" -> "54", "Ukraine" -> "55", "Belgium" -> "56",
      "Netherlands" -> "57", "SK" -> "58", "Portugal" -> "59", "PH" -> "60",
      "AR" -> "61", "FR" -> "62", "HongKong" -> "63", "Thailand" -> "64",
      "Morocco" -> "65", "CL" -> "66", "Venezuela" -> "67", "HU" -> "68",
      "MA" -> "69", "TH" -> "70", "SA" -> "71", "Turkey" -> "72",
      "CzechRepublic" -> "73", "ZA" -> "74", "ID" -> "75", "GR" -> "76",
      "CA" -> "77", "Egypt" -> "78", "SV" -> "79", "AE" -> "80",
      "SG" -> "81", "TR" -> "82", "AU" -> "83", "Kazakhstan" -> "84",
      "Poland" -> "85", "SE" -> "86", "IE" -> "87", "ES" -> "88",
      "SI" -> "89", "KR" -> "90", "CZ" -> "91", "CH" -> "92",
      "EG" -> "93", "Vietnam" -> "94", "Hungary" -> "95", "IL" -> "96",
      "PE" -> "97", "Pakistan" -> "98", "DK" -> "99", "NL" -> "100",
      "LT" -> "101", "LU" -> "102", "NG" -> "103", "BE" -> "104",
      "NO" -> "105", "Serbia" -> "106", "Croatia" -> "107", "Nigeria" -> "108",
      "AT" -> "109", "VE" -> "110", "IT" -> "111", "NewZealand" -> "112",
      "Greece" -> "113", "Korea,Republicof" -> "114", "Kenya" -> "115", "PT" -> "116",
      "HR" -> "117", "VN" -> "118", "BG" -> "119", "Bulgaria" -> "120",
      "KE" -> "121", "NZ" -> "122", "YU" -> "123", "PK" -> "124",
      "FI" -> "125", "GE" -> "126", "GT" -> "127", "AF" -> "128",
      "TN" -> "129"
    )
    if (country == null || country.isEmpty || "null".equals(country.toLowerCase)) "0"
    else all_country.getOrElse(country, "130")
  }

  /** Binary label: any recorded reading level ("High"/"Medium"/"Low") -> 1, else 0. */
  def level2num(lev: String): Int =
    if (lev == "High" || lev == "Medium" || lev == "Low") 1 else 0


}