package cn.doitedu.profile.ml

import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.regression.LinearRegression
import org.apache.spark.sql.SparkSession

/**
 * Minimal linear-regression demo: trains a model on (area, floor) -> price
 * from a CSV training set, then predicts prices for a CSV test set.
 */
object LinearRegressionDemo {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("LinearRegressionDemo")
      .master("local")
      // BUG FIX: key was misspelled as "spark.sql.shuffle.partitioins",
      // which Spark silently ignores — the setting never took effect.
      .config("spark.sql.shuffle.partitions", "1")
      .getOrCreate()

    import spark.implicits._

    // Packs the (area, floor) columns of a CSV-derived DataFrame into a dense
    // feature vector, keeping price as the label column.
    // Shared by the train and test sets (was previously copy-pasted).
    def vectorize(df: org.apache.spark.sql.DataFrame): org.apache.spark.sql.DataFrame =
      df.map { row =>
        // expected columns: area, floor, price (all Double after inferSchema)
        val area  = row.getAs[Double]("area")
        val floor = row.getAs[Double]("floor")
        val price = row.getAs[Double]("price")
        (Vectors.dense(Array(area, floor)), price)
      }.toDF("features", "price")

    // Load and vectorize the training set
    val train = spark.read.option("header", "true").option("inferSchema", "true").csv("profile/data/linear/sample")
    val trainFeatures = vectorize(train)

    // Configure the linear regression estimator
    val regression = new LinearRegression()
      .setFeaturesCol("features")
      .setLabelCol("price")

    // Fit the model on the training data
    val model = regression.fit(trainFeatures)

    // Load and vectorize the test set
    // (original comment incorrectly said "training set" here)
    val test = spark.read.option("header", "true").option("inferSchema", "true").csv("profile/data/linear/test")
    val testFeatures = vectorize(test)

    // Run the model over the test set and display the predictions
    val result = model.transform(testFeatures)
    result.show(100, false)

    spark.close()
  }
}
