package com.shujia.spark.mllib

import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.regression.{LinearRegression, LinearRegressionModel}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
 * Demo: fit a simple one-variable linear regression (y = a*x + b) with Spark ML.
 *
 * Reads (y, x) pairs from a local CSV file, trains a [[LinearRegression]] model,
 * prints the learned intercept and coefficient, then predicts y for x = 100.
 */
object Demo2Point {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("point")
      .getOrCreate()
    import spark.implicits._

    try {
      // Read the raw data; schema declares column 1 as the label y and column 2 as the feature x.
      val data: DataFrame = spark.read
        .format("csv")
        .schema("y DOUBLE,x DOUBLE")
        .option("sep", ",")
        .load("data/points.txt")

      // Convert each row into the LabeledPoint structure the algorithm expects:
      // label = y, features = dense vector containing the single predictor x.
      val pointDF: DataFrame = data.map {
        case Row(y: Double, x: Double) => LabeledPoint(y, Vectors.dense(x))
      }.toDF()

      // Build the estimator: ordinary linear regression (default hyper-parameters).
      val lr = new LinearRegression()

      // Fit the model; training iterates internally to minimize the loss.
      val model: LinearRegressionModel = lr.fit(pointDF)

      // A model = algorithm + learned weights; print intercept b and coefficient a.
      println(s"intercept:${model.intercept}")
      println(s"coefficients:${model.coefficients}")

      // Predict y for a new input x = 100.
      val y: Double = model.predict(Vectors.dense(100))
      println(y)
    } finally {
      // Always release the SparkSession (and its underlying SparkContext);
      // the original code leaked it by never calling stop().
      spark.stop()
    }
  }
}
