package com.shujia.mllib

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.ml
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.regression.{LinearRegression, LinearRegressionModel}
import org.apache.spark.sql.expressions.UserDefinedFunction

object Demo1Line {
  /**
   * Minimal end-to-end linear-regression example: read (label, x) pairs from a
   * CSV file, assemble the single feature into an ml Vector, fit a
   * LinearRegression model, and print the intercept, coefficients, and one
   * sample prediction.
   */
  def main(args: Array[String]): Unit = {
    // 1. Create the Spark session (single-threaded local mode)
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("line")
      .getOrCreate()

    try {
      import spark.implicits._
      import org.apache.spark.sql.functions._

      // 2. Read the raw data: each CSV row is "label,x", both parsed as DOUBLE
      val lines: DataFrame = spark
        .read
        .format("csv")
        .option("sep", ",")
        .schema("label DOUBLE, x DOUBLE")
        .load("spark/data/lines.txt")

      // UDF wrapping the scalar feature into an ml Vector, the input format
      // LinearRegression expects in its "features" column.
      // Declared as a val so the UDF is constructed once, not on every reference
      // (the original `def` rebuilt it each time it was mentioned).
      val xToVector: UserDefinedFunction = udf((x: Double) => Vectors.dense(Array(x)))

      // 3. Feature engineering: build the (label, features) training DataFrame
      val tranDF: DataFrame = lines.select($"label", xToVector($"x") as "features")

      // 4. Choose the algorithm (ordinary least squares with default params)
      val lr: LinearRegression = new LinearRegression()

      // 5. Fit the model on the training data
      val model: LinearRegressionModel = lr.fit(tranDF)

      println(s"截距：${model.intercept}")
      println(s"权重：${model.coefficients}")

      // Predict on a new, unseen data point (x = 100)
      println(model.predict(Vectors.dense(100)))
    } finally {
      // Always release the Spark session, even if reading or training fails;
      // the original code leaked the SparkContext on every run.
      spark.stop()
    }
  }
}
