package com.shujia.mlib

import org.apache.spark.mllib.linalg.distributed.RowMatrix
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.linalg.{Matrices, Matrix, Vectors}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.util.MLUtils

object Demo3LabeledPoint {

  /**
   * Demo of the basic Spark MLlib (RDD API) data structures:
   * [[LabeledPoint]] with dense/sparse vectors, loading a LIBSVM-format
   * dataset, a local dense [[Matrix]], and a distributed [[RowMatrix]].
   *
   * Runs in `local` mode and prints everything to the console.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local").setAppName("Demo3LabeledPoint")

    val sc = new SparkContext(conf)

    // Ensure the SparkContext is released even if the demo fails midway;
    // the original code never called sc.stop(), leaking the context.
    try {
      // One training example: a label (y) plus a feature vector (the x values).
      val point = LabeledPoint(1.0, Vectors.dense(1.0, 2.0, 3.0))

      // Sparse variant: vector of size 3 with non-zeros 2.0 and 3.0 at indices 1 and 2.
      val point1 = LabeledPoint(1.0, Vectors.sparse(3, Array(1, 2), Array(2.0, 3.0)))

      // Load a dataset in LIBSVM format ("label index:value index:value ...").
      val points = MLUtils.loadLibSVMFile(sc, "spark/data/人体指标.txt")
      // NOTE(review): foreach(println) prints on the executors; that reaches this
      // console only because master is "local". On a cluster use
      // points.collect().foreach(println) (or take(n) for large data).
      points.foreach(println)

      // Local dense matrix, column-major storage: 3 rows x 2 cols.
      val dm: Matrix = Matrices.dense(3, 2, Array(1.0, 3.0, 5.0, 2.0, 4.0, 6.0))

      println(dm)
      println(dm.isTransposed)
      println(dm.numCols)
      println(dm.numRows)
      println(dm.transpose)

      val rdd = sc.parallelize(Array(
        Array[Double](1, 2, 3, 4, 5, 6),
        Array[Double](1, 2, 3, 4, 5, 6),
        Array[Double](1, 2, 3, 4, 5, 6),
        Array[Double](1, 2, 3, 4, 5, 6)
      ))

      // Convert each row (Array[Double]) into a dense MLlib vector.
      val vData = rdd.map(arr => Vectors.dense(arr))

      // Row-oriented distributed matrix backed by the RDD of row vectors.
      val mat: RowMatrix = new RowMatrix(vData)

      println(mat)
    } finally {
      sc.stop()
    }
  }
}
