package bayes


import org.apache.log4j.{Level, Logger}
import org.apache.spark.ml.linalg.DenseVector
import org.apache.spark.mllib.linalg.{DenseVector=>LibDenseVector}
import org.apache.spark.mllib.linalg
import org.apache.spark.mllib.linalg.distributed.RowMatrix
import org.apache.spark.mllib.linalg.{SparseVector=>LibSparseVector, Vectors}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
  * Created by hunter.coder 涛哥  
  * 2019/5/3 15:34
  * 交流qq:657270652
  * Version: 1.0
  * 更多学习资料：https://blog.csdn.net/coderblack/
  * Description:
  **/
/**
  * Small demo of Spark MLlib linear-algebra types: builds a few
  * [[LabeledPoint]]s, assembles their feature vectors into a distributed
  * [[RowMatrix]], and constructs dense/sparse vectors from both the
  * `ml` and `mllib` packages.
  */
object VectorDemo {
  def main(args: Array[String]): Unit = {

    // Silence Spark's verbose INFO logging for the demo.
    Logger.getLogger("org").setLevel(Level.WARN)

    val spark = SparkSession.builder().appName("itemcb").master("local[*]").getOrCreate()

    try {
      // Five labeled points with 4-dimensional dense feature vectors.
      val p1 = LabeledPoint(1.0, Vectors.dense(Array(1.0, 1.0, 1.0, 1.0)))
      val p2 = LabeledPoint(0.0, Vectors.dense(Array(2.0, 0.0, 2.0, 2.0)))
      val p3 = LabeledPoint(1.0, Vectors.dense(Array(1.0, 0.0, 1.0, 1.0)))
      val p4 = LabeledPoint(0.0, Vectors.dense(Array(1.0, 2.0, 2.0, 2.0)))
      val p5 = LabeledPoint(1.0, Vectors.dense(Array(0.0, 1.0, 1.0, 1.0)))

      val rows: RDD[linalg.Vector] =
        spark.sparkContext.parallelize(
          Seq(p1.features, p2.features, p3.features, p4.features, p5.features))

      // Distributed row matrix: 5 rows x 4 columns (dimensions given explicitly
      // so Spark does not have to compute them with an extra pass).
      val mat = new RowMatrix(rows, 5, 4)

      // BUG FIX: the original called `mat.formatted(",")`, which is
      // StringLike.formatted — with no `%` specifier it just prints ",".
      // Print the dimensions and the actual rows instead.
      println(s"RowMatrix: ${mat.numRows()} x ${mat.numCols()}")
      mat.rows.collect().foreach(println)

      // Dense vectors from the new `ml` package and the legacy `mllib` package.
      val mlDense = new DenseVector(Array(2.0, 3.0, 4.0))
      val libDense = new LibDenseVector(Array(2.1, 3.2, 4.5))
      println(mlDense)
      println(libDense)

      // BUG FIX: SparseVector's `values` parameter is Array[Double]; the
      // original passed Array(2, 3) (Array[Int]), which does not compile.
      println(new LibSparseVector(3, Array(0, 2), Array(2.0, 3.0)))
    } finally {
      // Release the local Spark resources even if the demo throws.
      spark.stop()
    }
  }
}
