package com.csw.mllib

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.linalg
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD

/**
  * Demo of MLlib linear-algebra primitives: dense/sparse vectors,
  * labeled points, and loading LibSVM-formatted training data.
  */
object Demo01Vertor {

  /**
    * Entry point.
    *
    * @param args optional; args(0) may override the default LibSVM data path
    *             (defaults to "spark/data/人体指标.txt" for backward compatibility)
    */
  def main(args: Array[String]): Unit = {

    /**
      * A vector has both direction and magnitude.
      */

    // Dense vector: stores every element explicitly, including zeros.
    val dense: linalg.Vector = Vectors.dense(Array(1.0, 2.3, 4.0, 5.0, 0.0, 0.0, 0.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0))

//    println(dense)

    /**
      * A sparse vector uses less space when most elements are zero:
      * it stores only (size, indices of non-zeros, non-zero values).
      */
    // Sparse vector: same logical content as `dense` above.
    val sparse: linalg.Vector = Vectors.sparse(17, Array(0, 1, 2, 3, 7), Array(1.0, 2.3, 4.0, 5.0, 4.0))

//    println(sparse)

    /**
      * Dense and sparse representations are interconvertible.
      */
//    println(sparse.toDense)

    /**
      * LabeledPoint: one training example = a label plus a feature vector.
      */
    // Positive example (label 1.0) with a dense feature vector.
    val pos: LabeledPoint = LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0))

//    println(pos)

    // Negative example (label 0.0) with a sparse feature vector.
    val neg: LabeledPoint = LabeledPoint(0.0, Vectors.sparse(3, Array(0, 2), Array(1.0, 3.0)))

//    println(neg)

    val conf: SparkConf = new SparkConf().setAppName("vertor").setMaster("local")

    val sc: SparkContext = new SparkContext(conf)

    // Ensure the SparkContext is always released, even if loading fails.
    try {
      /**
        * Read data in LibSVM (SVM-light) format:
        * each line is "label index1:value1 index2:value2 ...".
        */
      // Allow the data path to be supplied as the first CLI argument;
      // fall back to the original hard-coded path when absent.
      val path = args.headOption.getOrElse("spark/data/人体指标.txt")
      val data: RDD[LabeledPoint] = MLUtils.loadLibSVMFile(sc, path)

      data.foreach(println)
    } finally {
      sc.stop()
    }
  }
}
