package cn.doitedu.ml.doit13.losspre

import cn.doitedu.commons.util.SparkUtil
import org.apache.spark.ml.classification.LogisticRegressionModel
import org.apache.spark.ml.feature.MinMaxScaler

/**
 * 利用训练好的模型，来对大量未知数据预测流失概率
 */
/**
 * Batch-scores unseen user data with a previously trained logistic-regression
 * churn model and prints the predicted churn probability per user.
 */
object ModelPredict {

  def main(args: Array[String]): Unit = {

    val spark = SparkUtil.getSparkSession("")

    // Read the raw feature CSV; inferSchema gives numeric columns instead of strings.
    val test = spark.read
      .options(Map(("header", "true"), ("inferSchema", "true")))
      .csv("userprofile/data/loss_predict/test/liushi_to_predict.csv")

    import cn.doitedu.ml.util.VecUtil._
    spark.udf.register("arr2vec", arr2Vec)

    // Assemble the feature columns into a single ML vector.
    // FIX: column names starting with a digit (3_cs, 15_cs, ...) must be
    // backtick-quoted in Spark SQL, otherwise selectExpr fails to parse them
    // (unquoted, `3_cs` lexes as the literal 3 followed by a stray identifier).
    val vecs = test.selectExpr(
      "gid",
      "arr2vec(array(`3_cs`,`15_cs`,`3_xf`,`15_xf`,`3_th`,`15_th`,`3_hp`,`15_hp`,`3_cp`,`15_cp`,`last_dl`,`last_xf`)) as vec"
    )

    // Min-max normalize the raw vector into [0, 1].
    // NOTE(review): the scaler is re-fit on the prediction data itself; for
    // consistent results it should load the MinMaxScalerModel fitted on the
    // TRAINING data (min/max here may differ from training) — TODO confirm
    // whether a saved scaler model exists alongside the LR model.
    val scaler = new MinMaxScaler()
      .setInputCol("vec")
      .setOutputCol("features")
    val scalerModel = scaler.fit(vecs)
    val scaleVecs = scalerModel.transform(vecs)

    // Load the trained logistic-regression model from disk.
    val model = LogisticRegressionModel.load("userprofile/data/loss_predict/model")

    // Score the unseen data; keep the user id, class probabilities, and the
    // predicted label (1.0 = predicted to churn).
    val res = model.transform(scaleVecs).select("gid", "probability", "prediction")

    // truncate = false so long probability vectors are printed in full
    res.show(100, false)

    spark.close()
  }

}
