package com.timeriver.machine_learning.regression

import org.apache.spark.ml.evaluation.RegressionEvaluator
import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.regression.{DecisionTreeRegressionModel, DecisionTreeRegressor}
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

object DecisionTreeAlg {

  /** Trains a decision-tree regressor on the Boston housing data set
    * (rows of 13 whitespace-separated feature columns followed by one
    * label column) and prints the RMSE measured on a 30% hold-out split.
    *
    * @param args optional; args(0) overrides the default data file path
    */
  def main(args: Array[String]): Unit = {

    val session: SparkSession = SparkSession.builder()
      .master("local[6]")
      .appName("决策树回归算法")
      .getOrCreate()

    import session.implicits._

    try {
      // Allow the data location to be supplied on the command line; fall
      // back to the original hard-coded path for backward compatibility.
      val dataPath: String =
        if (args.nonEmpty) args(0)
        else "D:\\workspace\\gitee_space\\spark-ml-machine-learning\\data\\housing.data"

      val ds: Dataset[String] = session.read.textFile(dataPath)

      // Parse each non-empty row: the last column is the regression target,
      // every preceding column is a feature.
      val value: Dataset[LabeledPoint] = ds.map(_.trim).filter(line => !line.isEmpty)
        .map { line =>
          val array: Array[Double] = line.split("\\s+").map(_.toDouble)
          LabeledPoint(
            array.last,
            // BUG FIX: the original slice(1, array.size - 1) silently dropped
            // the FIRST feature column as well as the label; `init` keeps
            // every column except the trailing label.
            Vectors.dense(array.init)
          )
        }

      // Deterministic 70/30 train/test split (seed fixed for reproducibility).
      val Array(train, test) = value.randomSplit(Array(0.7, 0.3), 123)

      val regressor = new DecisionTreeRegressor().setMaxDepth(5)

      val model: DecisionTreeRegressionModel = regressor.fit(train)

      // Score the hold-out set and show a few rows without truncation.
      val frame: DataFrame = model.transform(test)
      frame.show(5, false)

      val evaluator: RegressionEvaluator = new RegressionEvaluator()
        .setLabelCol("label")
        .setPredictionCol("prediction")
        .setMetricName("rmse")
      val rmse: Double = evaluator.evaluate(frame)

      println(s"决策树回归RMSE=》 ${rmse}")
    } finally {
      // Always release the SparkSession, even if loading or training fails.
      session.stop()
    }
  }
}
