package cn.doitedu.ml.examples

import cn.doitedu.commons.utils.SparkUtil
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.regression.LinearRegression
import org.apache.spark.ml.evaluation.RegressionEvaluator
import org.apache.spark.sql.DataFrame

import scala.collection.mutable

/**
 * @Title: LinearRegressionDemo.scala
 * @Package cn.doitedu.ml.examples
 * @Description: Linear regression algorithm demo (线性回归算法demo)
 * @Author hunter@doitedu.cn
 * @date 2020/8/18 11:15
 */
object LinearRegressionDemo {

  /**
   * Demo entry point: trains a linear-regression model on a CSV sample set,
   * predicts on a test set, and prints four regression metrics (r2, rmse, mse, mae).
   *
   * Expected input CSVs (with headers):
   *   - sample: columns `area`, `floor`, `price` (label)
   *   - test:   columns `area`, `floor`, `label`
   * NOTE(review): column schema inferred from the selects below — confirm against the data files.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkUtil.getSparkSession("线性回归算法示例")
    import spark.implicits._
    import org.apache.spark.sql.functions._

    val sample = spark.read.option("header","true").option("inferSchema","true").csv("portrait/testdata/linage/sample")
    val test = spark.read.option("header","true").option("inferSchema","true").csv("portrait/testdata/linage/test")

    // UDF: turn an array column of doubles into an ML dense vector (required by the estimator)
    val arr2Vec = udf((arr:mutable.WrappedArray[Double])=>{Vectors.dense(arr.toArray)})

    // Feature engineering: assemble (area, floor) into a single feature vector column
    val sampleVecs = sample.select(arr2Vec(array('area,'floor)) as "vec",'price)
    val testVecs = test.select(arr2Vec(array('area,'floor)) as "vec",'label)

    // Build the estimator and set its hyperparameters
    val linearRegression = new LinearRegression()
      .setLabelCol("price")
      .setFeaturesCol("vec")
      .setRegParam(0.1)  // regularization parameter: guards against overfitting

    // Train the model and predict on the test set
    val model = linearRegression.fit(sampleVecs)
    val prediction: DataFrame = model.transform(testVecs)

    // Cache BEFORE the first action so show() populates the cache instead of
    // recomputing the lineage for each of the evaluations below.
    prediction.cache()
    prediction.show(100, false)

    // Model evaluation: one evaluator per metric, built in a loop instead of
    // four copy-pasted blocks differing only in the metric name.
    for (metric <- Seq("r2", "rmse", "mse", "mae")) {
      val evaluator = new RegressionEvaluator()
        .setLabelCol("label")
        .setPredictionCol("prediction")
        .setMetricName(metric)
      println(s"$metric = ${evaluator.evaluate(prediction)}")
    }

    // Release the cached DataFrame before shutting down the session.
    prediction.unpersist()
    spark.close()

  }
}
