package com.shujia.ml

import org.apache.spark.SparkContext
//import org.apache.spark.mllib.regression.LabeledPoint
//import org.apache.spark.mllib.linalg.Vectors

import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.Vectors

import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demo: convert comma-separated feature/label text into Spark ML
 * `LabeledPoint` rows, show them as a DataFrame, and read back a
 * libsvm-formatted dataset via the built-in "libsvm" data source.
 *
 * Input line format (4 comma-separated numbers):
 *   feature0,feature1,feature2,label
 */
object Code03LibSVM {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("spark")
      .getOrCreate()

    val sc: SparkContext = spark.sparkContext
    // Raw text: each line is "f0,f1,f2,label"
    val data: RDD[String] = sc.textFile("spark_code/data/ml/data.txt")

    // Alternative: load an existing libsvm file directly as a DataFrame.
    //    val training = spark.read.format("libsvm")
    //      .load("data/mllib/sample_linear_regression_data.txt")

    import spark.implicits._
    // Parse each line into (label, dense feature vector); column 3 is the label.
    val trans: DataFrame = data.map(
      line => {
        val splitRes: Array[String] = line.split(",")
        LabeledPoint(splitRes(3).toDouble, Vectors.dense(splitRes(0).toDouble, splitRes(1).toDouble, splitRes(2).toDouble))
      }

    ).toDF("label", "features")

    trans.show()
//
//        trans
//          .write
//          .format("libsvm")
//          .save("spark_code/data/ml/data_lib_svm")

// RDD-based equivalent using the older mllib API (kept for reference):
//    val labeledPointRDD: RDD[LabeledPoint] = data.map(
//      line => {
//        val splitRes: Array[String] = line.split(",")
//        LabeledPoint(splitRes(3).toDouble, Vectors.dense(splitRes(0).toDouble, splitRes(1).toDouble, splitRes(2).toDouble))
//      }
//
//    )

//    MLUtils.saveAsLibSVMFile(labeledPointRDD,"spark_code/data/ml/rdd_data_lib_svm")

    // Read back the previously saved libsvm dataset as (label, features).
    val data_lib_svm = spark.read.format("libsvm")
      .load("spark_code/data/ml/data_lib_svm")
    data_lib_svm.show(truncate = false)

    // Release the SparkContext and its resources; without this the local
    // Spark runtime (threads, UI port, temp dirs) is leaked until JVM exit.
    spark.stop()
  }
}
