//package com.bj58.test
//
//import com.bj58.zhaopin.ai.util.DateTimeUtil
//import org.apache.commons.lang.StringUtils
//import org.apache.spark.ml.classification.NaiveBayes
//import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
//import org.apache.spark.ml.feature.VectorAssembler
//import org.apache.spark.sql.SparkSession
//
///**
//  * created by zby on 2018/11/21
//  */
// NOTE(review): this entire file is commented-out dead code. It is left disabled
// on purpose; only the inner comments were translated/expanded in English below.
// Pipeline (if re-enabled): read user activity + profile tags from Hive, label
// users as "lost" (no visit in the following week), train a NaiveBayes model,
// and print classification accuracy.
//object UserLossProbability {
//  // Recency buckets produced by the timeTrans UDF (days elapsed vs. the reference date).
//  private val TIME_STATE_0 = 0 // yesterday
//  private val TIME_STATE_1 = 1 // within the last 3 days
//  private val TIME_STATE_2 = 2 // within the last 7 days
//  private val TIME_STATE_3 = 3 // within the last month (<= 30 days)
//  private val TIME_STATE_4 = 4 // within the last 3 months (<= 90 days)
//  private val TIME_STATE_5 = 5 // more than 3 months ago (also the default for blank input)
//
//
//  // Entry point: dt (yyyyMMdd) comes from args(0) when provided, otherwise yesterday.
//  def main(args: Array[String]): Unit = {
//    val spark = SparkSession
//      .builder
//      .enableHiveSupport()
//      .appName("UserLossProbability")
//      .getOrCreate()
//    val dt = if (args != null && args.length > 0) args(0) else DateTimeUtil.yesterday("yyyyMMdd")
//
//    run(spark, dt)
//
//    spark.stop()
//  }
//
//  // Builds the labeled dataset and trains/evaluates the model.
//  // Label definition: users active 8 days before dt (t1) who never reappeared in the
//  // following week (t2 is null) get label 0; returning users get label 1.
//  def run(spark: SparkSession, dt: String): Unit = {
//    val beforedt = DateTimeUtil.addDay(dt, "yyyyMMdd", -1)
//    val sevenDaysdt = DateTimeUtil.addDay(dt, "yyyyMMdd", -8)
//    registUdf(spark,beforedt)
//    // Features: VIP flag, visit-day bucket, recency buckets for first login / last login /
//    // last resume view / last invite click / last IM, plus buy/consume power tags.
//    val sql =
//      s"""
//         |select t3.uid,t4.is_vip,dayCount(t3.visitday) as daycount,timeTrans(t4.first_login_time) as firstdt,timeTrans(t4.last_login_time) as lastlogintime,timeTrans(t4.last_resume_detail_time) as lastresumetime,timeTrans(t4.last_click_invite_time) as lastinvitetime,timeTrans(t4.last_im_time) as lastimtime,t4.buy_power,t4.biz_consume_power,t3.label
//         |from(select t1.uid,t1.visitday,(case when t2.uid is null then 0 else 1 end) as label
//         |from (select uid,visitday from hdp_lbg_supin_defaultdb.ods_zcm_alluser_id where dt=$sevenDaysdt and visitday=1)t1
//         |left join
//         |(select uid from hdp_lbg_supin_defaultdb.ods_zcm_alluser_id where dt>$sevenDaysdt and dt<=$dt and visitday=1)t2 on t1.uid=t2.uid)t3
//         |join
//         |(select uid,is_vip,first_login_time,last_login_time,last_resume_detail_time,last_click_invite_time,last_im_time,buy_power,biz_consume_power from hdp_lbg_supin_defaultdb.ai_dw_zcm_user_tag)t4
//         |on t3.uid=t4.uid
//       """.stripMargin
//    val userDF = spark.sql(sql).repartition(256).cache()
//
//    // Assemble the feature columns into a single ML vector column.
//    val assembler = new VectorAssembler().setInputCols(Array("is_vip","daycount","firstdt","lastlogintime","lastresumetime","lastinvitetime","lastimtime","buy_power","biz_consume_power")).setOutputCol("features")
//    val dataset = assembler.transform(userDF)
//
//    // Earlier experiment: PCA down to 6 components before training (left disabled).
//    /*val pca = new PCA().setInputCol("features").setOutputCol("features2").setK(6)
//    val pcamodel = pca.fit(dataset)
//    val dataset2 = pcamodel.transform(dataset)*/
//    //val Array(train,test) = dataset2.randomSplit(Array(0.7,0.3))
//    val Array(train,test) = dataset.randomSplit(Array(0.7,0.3))
//
//    val bayes = new NaiveBayes().setFeaturesCol("features").setLabelCol("label")
//    val model = bayes.fit(train)
//    val result = model.transform(test)
//    result.show(50)
//
//    // Overall accuracy only; NOTE(review): with an imbalanced lost/retained split,
//    // accuracy alone can be misleading — consider precision/recall if re-enabled.
//    val evaluator = new MulticlassClassificationEvaluator()
//      .setLabelCol("label")
//      .setPredictionCol("prediction")
//      .setMetricName("accuracy")
//    val accuracy = evaluator.evaluate(result)
//    //result.where("prediction=0").show(50)
//    println(s"""accuracy is $accuracy""")
//  }
//
//  // Registers the two Hive UDFs used by the SQL above:
//  //   timeTrans(str) -> recency bucket (TIME_STATE_*) relative to beforedt
//  //   dayCount(str)  -> coarse bucket of the visit-day count
//  def registUdf(spark:SparkSession,beforedt:String): Unit ={
//    spark.udf.register("timeTrans", (str: String) => {
//      if (StringUtils.isNotBlank(str)) {
//        var s = ""
//        if(str.length>8){
//          // NOTE(review): substring's endIndex is exclusive, so (0,7) keeps only 7
//          // characters of a yyyyMMdd prefix — likely should be substring(0,8).
//          // Confirm before re-enabling this code.
//          s = str.substring(0,7)
//        }else{
//          s=str
//        }
//        val diff = DateTimeUtil.dayDiff(DateTimeUtil.str2Date(s, "yyyyMMdd"), DateTimeUtil.str2Date(beforedt, "yyyyMMdd"))
//        var timestage = TIME_STATE_5
//        if (diff <= 0) {
//          timestage = TIME_STATE_0
//        } else if (diff > 0 && diff <= 3) {
//          timestage = TIME_STATE_1
//        } else if (diff > 3 && diff <= 7) {
//          timestage = TIME_STATE_2
//        } else if (diff > 7 && diff <= 30) {
//          timestage = TIME_STATE_3
//        } else if (diff > 30 && diff <= 90) {
//          timestage = TIME_STATE_4
//        } else {
//          timestage = TIME_STATE_5
//        }
//        timestage
//      }else{
//        TIME_STATE_5
//      }
//    })
//
//    // Buckets the visit-day count: 1-3 -> 0, 4-6 -> 1, everything else (incl. blank) -> 3.
//    // NOTE(review): bucket value 2 is never produced — confirm the gap is intentional.
//    spark.udf.register("dayCount", (str: String) => {
//      var daynum = 3
//      if(StringUtils.isNotBlank(str)){
//          val num = str.toInt
//          if(num>=1 && num<=3){
//            daynum=0
//          }else if(num>3 && num<=6){
//            daynum = 1
//          }else{
//            daynum = 3
//          }
//        }
//      daynum
//    })
//  }
//}
