package com.shujia.ml


import org.apache.spark.SparkContext
import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.regression.LinearRegression
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

object Code02LinearRegression {

  /** Trains an elastic-net regularized linear regression model on a CSV file
    * whose rows are assumed to be four numeric columns — the first three are
    * features, the fourth is the label (TODO: confirm against the data file) —
    * then prints training metrics and a single prediction.
    *
    * Fix: the SparkSession was never stopped; work is now wrapped in
    * try/finally so resources are released even when training fails.
    */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("spark")
      .getOrCreate()

    try {
      val sc: SparkContext = spark.sparkContext
      val data: RDD[String] = sc.textFile("spark_code/data/ml/data.txt")

      // Alternative: load libsvm-formatted data directly as a DataFrame.
      //    val training = spark.read.format("libsvm")
      //      .load("data/mllib/sample_linear_regression_data.txt")

      import spark.implicits._
      // Parse each CSV line into a LabeledPoint: column 3 is the label,
      // columns 0-2 form the dense feature vector.
      val trans: DataFrame = data.map { line =>
        val cols: Array[String] = line.split(",")
        LabeledPoint(
          cols(3).toDouble,
          Vectors.dense(cols(0).toDouble, cols(1).toDouble, cols(2).toDouble)
        )
      }.toDF("label", "features")

      // Elastic-net regression: regParam = lambda (overall strength),
      // elasticNetParam = alpha (L1/L2 mix; 0.8 leans toward L1/lasso).
      val lr = new LinearRegression()
        .setMaxIter(10)
        .setRegParam(0.3)
        .setElasticNetParam(0.8)

      // Fit the model on the full training DataFrame.
      val lrModel = lr.fit(trans)

      // Training diagnostics.
      val trainingSummary = lrModel.summary
      println(s"RMSE: ${trainingSummary.rootMeanSquaredError}") // root mean squared error
      println(s"r2: ${trainingSummary.r2}") // coefficient of determination

      // Predict the label for one hand-picked feature vector.
      val predictRes: Double = lrModel.predict(Vectors.dense(53, 86, 75))
      println(s"predictRes:$predictRes")
    } finally {
      // Release driver/executor resources even if an earlier step threw.
      spark.stop()
    }
  }
}
