package cn.doitedu.ml.bayes

import org.apache.log4j.{Level, Logger}
import org.apache.spark.ml.classification.NaiveBayesModel
import org.apache.spark.ml.linalg
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession

import scala.collection.mutable

object ChuguiYuce {

  /**
   * Scores a CSV test set against a previously trained NaiveBayes model.
   *
   * Pipeline: read raw CSV -> SQL-encode the categorical string columns
   * (job / income / age / sex) into the numeric codes the model was trained
   * with -> assemble them into an ml `Vector` via a UDF -> load the saved
   * model and print its predictions.
   *
   * @param args optional overrides: args(0) = test-data CSV path,
   *             args(1) = saved model path. Both default to the original
   *             hard-coded locations, so invoking with no args is unchanged.
   */
  def main(args: Array[String]): Unit = {

    // Paths are parameterized but keep the original locations as defaults.
    val dataPath  = if (args.length > 0) args(0) else "userprofile/data/chugui/test"
    val modelPath = if (args.length > 1) args(1) else "userprofile/data/chugui/model"

    // Silence Spark's verbose INFO logging.
    Logger.getLogger("org.apache").setLevel(Level.WARN)
    val spark = SparkSession
      .builder()
      .appName("出轨预测")
      .master("local")
      .getOrCreate()

    // Raw test data; the header row supplies the column names.
    val df = spark.read.option("header", true).csv(dataPath)
    df.createTempView("df")

    // Encode each categorical string column into the same numeric codes the
    // model was trained with. NOTE(review): the mapping (e.g. 老师 -> 1.0)
    // must match the training-side encoding exactly — verify against the
    // training job.
    val featuresDF = spark.sql(
      """
        |
        |select
        |name,
        |case
        |  when job='老师' then 1.0
        |  when job='程序员' then 2.0
        |else 3.0
        |end as job,
        |
        |case
        | when cast(income as double) <10000 then 1.0
        | when cast(income as double) between 10000 and 20000 then 2.0
        | else 3.0
        |end as income ,
        |
        |case
        |  when age='青年' then 1.0
        |  when age='中年' then 2.0
        |else 3.0
        |end as age,
        |
        |if(sex ='男',1.0,2.0) as sex
        |
        |from df
        |
        |""".stripMargin)

    featuresDF.show(100, false)
    featuresDF.createTempView("ft")

    // Convert a native array column into the Vector type MLlib expects.
    // The parameter is Seq[Double] rather than mutable.WrappedArray: Spark
    // hands array columns to Scala UDFs as WrappedArray on Scala 2.12 but as
    // mutable.ArraySeq on Scala 2.13, so typing the UDF against the Seq
    // interface works on both and avoids a runtime ClassCastException.
    val arr2Vec = (arr: Seq[Double]) => {
      // Vector is an interface with two implementations: DenseVector and SparseVector.
      val vector: linalg.Vector = Vectors.dense(arr.toArray)
      vector
    }

    spark.udf.register("arr2vec", arr2Vec)

    // Assemble the four encoded features into a single vector column named
    // "features" — the column name the NaiveBayes model was fit against.
    val vec = spark.sql(
      """
        |
        |select
        |name,
        |arr2vec(array(cast(job as double),cast(income as double),cast(age as double), cast(sex as double))) as features
        |
        |from ft
        |""".stripMargin)

    // Load the persisted model and score the feature vectors.
    val model = NaiveBayesModel.load(modelPath)
    val predict = model.transform(vec)

    predict.show(100, false)

    spark.close()

  }

}
