package cn.doitedu.profile.ml.logisticregression

import org.apache.spark.ml.classification.LogisticRegression
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-06-24
 * @desc 流失风险预测
 */
/**
 * Trains a logistic-regression model that predicts user churn (loss) risk
 * from behavioral features, then prints predictions on a held-out split.
 *
 * Reads two CSV sample files, vectorizes the feature columns, splits the
 * combined data 80/20 into train/test, fits a LogisticRegression model,
 * and shows the transformed (predicted) test rows.
 */
object LossPredictTrainner {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("LossPredictTrainner") // non-empty name so the job is identifiable in the Spark UI
      .master("local")
      // FIX: key was misspelled as "spark.sql.shuffle.partitioins", so the
      // setting was silently ignored and the default (200) was used.
      .config("spark.sql.shuffle.partitions", "1")
      .getOrCreate()

    import spark.implicits._

    // Load the training sample set.
    val sample1 = spark.read.options(Map("header" -> "true", "inferSchema" -> "true")).csv("profile/data/loss/sample")

    // Load the test sample set.
    val sample2 = spark.read.options(Map("header" -> "true", "inferSchema" -> "true")).csv("profile/data/loss/test")

    // Vectorize the sample data into (label, guid, features).
    // NOTE(review): unioning the "train" and "test" files and then re-splitting
    // randomly below defeats the purpose of a separate held-out test file —
    // confirm whether this is intentional.
    val sample = sample1.union(sample2).map(row => {
      // Expected columns:
      // label,guid,cs_3,cs_15,xf_3,xf_15,th_3,th_15,hp_3,hp_15,cp_3,cp_15,last_dl,last_xf

      // NOTE(review): getAs[Double] assumes inferSchema produced DoubleType for
      // every column; integer-typed columns (e.g. guid) would throw a
      // ClassCastException here — verify against the actual CSV contents.
      val label = row.getAs[Double]("label")
      val guid = row.getAs[Double]("guid")

      // All behavioral feature columns, in a fixed order, as the feature vector.
      val featureArray = Array(
        row.getAs[Double]("cs_3"),
        row.getAs[Double]("cs_15"),
        row.getAs[Double]("xf_3"),
        row.getAs[Double]("xf_15"),
        row.getAs[Double]("th_3"),
        row.getAs[Double]("th_15"),
        row.getAs[Double]("hp_3"),
        row.getAs[Double]("hp_15"),
        row.getAs[Double]("cp_3"),
        row.getAs[Double]("cp_15"),
        row.getAs[Double]("last_dl"),
        row.getAs[Double]("last_xf"))

      (label, guid, Vectors.dense(featureArray))
    }).toDF("label", "guid", "features")

    // 80/20 random split into train and test sets.
    val Array(train, test) = sample.randomSplit(Array(0.8, 0.2))

    // Configure the logistic-regression estimator.
    val lr = new LogisticRegression()
      .setFeaturesCol("features")
      .setLabelCol("label")

    // Fit the model on the training split.
    val model = lr.fit(train)

    // Predict on the held-out split and show up to 100 untruncated rows.
    val result = model.transform(test)
    result.show(100, false)

    spark.close()
  }

}
