package com.fwmagic.spark.ml.loss

import com.fwmagic.spark.util.SparkUtils
import org.apache.spark.ml.classification.{LogisticRegression, LogisticRegressionModel}
import org.apache.spark.ml.feature.{MinMaxScaler, MinMaxScalerModel}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Applies a previously trained logistic-regression churn model to new data.
 *
 * Pipeline: read CSV -> assemble feature vector via the `arr2vec` UDF ->
 * min-max scale -> load the persisted LogisticRegressionModel -> predict ->
 * display `gid`, class probabilities and the predicted label.
 */
object ModelPredict {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkUtils.getSparkSession(this.getClass.getSimpleName)

    // Read the data to be predicted (header row present, schema inferred).
    val predictDF: DataFrame = spark.read
      .options(Map("header" -> "true", "inferSchema" -> "true"))
      .csv("data/loss_predict/liushi_to_predict.csv")

    predictDF.show(100, false)

    import com.fwmagic.spark.ml.utils.VectorUtils._
    // Register the UDF that converts an array of numeric columns into an ML Vector.
    spark.udf.register("arr2vec", arr2vec)

    // Column names that start with a digit (3_cs, 15_cs, ...) must be
    // back-quoted; the Spark SQL parser rejects unquoted identifiers
    // beginning with a digit.
    val predictVecs: DataFrame = predictDF.selectExpr(
      "gid",
      "arr2vec(array(`3_cs`,`15_cs`,`3_xf`,`15_xf`,`3_th`,`15_th`,`3_hp`,`15_hp`,`3_cp`,`15_cp`,last_dl,last_xf)) as vec"
    )

    // Scale features to [0, 1].
    // NOTE(review): the scaler is re-fit on the prediction data itself, so its
    // min/max ranges may differ from those seen at training time, skewing the
    // model's inputs. Ideally the MinMaxScalerModel fitted on the training set
    // should be persisted and loaded here — confirm against the training job.
    val scaler: MinMaxScaler = new MinMaxScaler()
      .setInputCol("vec")
      .setOutputCol("features")

    val scalerModel: MinMaxScalerModel = scaler.fit(predictVecs)
    val scaledDF: DataFrame = scalerModel.transform(predictVecs)

    scaledDF.show(100, false)

    // Load the trained logistic regression model from disk.
    val logisticRegressionModel: LogisticRegressionModel =
      LogisticRegressionModel.load("data/loss_predict/model")

    // Run prediction and display the results.
    val predictResult: DataFrame = logisticRegressionModel.transform(scaledDF)
    predictResult.selectExpr("gid", "probability", "prediction").show(100, false)

    spark.close()
  }

}
