package spark.chap4

import org.apache.spark.SparkContext
import org.apache.spark.mllib.linalg.distributed._
import org.apache.spark.mllib.linalg.{Matrices, Vectors}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.sql.SparkSession

/**
  * @author pinker on 2018/6/11
  */
object testVector {

  // Warehouse directory for the local SparkSession (Windows-style path).
  val hivePath = "D:/spark/hive/"

  /** Returns the shared SparkContext with log noise suppressed to ERROR. */
  private def getContext = {
    val spark = getSparkSession
    val sc = spark.sparkContext
    sc.setLogLevel("ERROR")
    sc
  }

  /** Builds (or reuses, via getOrCreate) the local SparkSession used by all demos. */
  private def getSparkSession = {
    SparkSession
      .builder()
      .appName("labelPoint")
      .master("local[*]")
      .config("spark.sql.warehouse.dir", hivePath)
      .getOrCreate()
  }

  /**
    * RowMatrix demo: loads a whitespace-separated numeric file, builds a
    * [[RowMatrix]] and prints its dimensions, pairwise column similarities,
    * per-column summary statistics and the covariance matrix.
    *
    * @param sc active SparkContext used to read the data file
    */
  def rowMatrix(sc: SparkContext): Unit = {
    val data = sc.textFile("src/main/resources/data/rowMatrix.txt")
      .map(_.split(' ').map(_.toDouble))
      .map(Vectors.dense)
    val rm = new RowMatrix(data)
    println(data + "\t" + rm)
    println(rm.numRows)
    println(rm.numCols)
    // Cosine similarity between every pair of columns.
    val coordinateMatrix = rm.columnSimilarities()
    // NOTE: foreach is side-effecting and returns Unit; do not wrap it in println.
    coordinateMatrix.entries.foreach(println)
    // Common per-column statistics.
    val statics = rm.computeColumnSummaryStatistics()
    println("count:" + statics.count)
    println("max:" + statics.max)
    println("mean:" + statics.mean)
    println("min:" + statics.min)
    println("numNonzeros:" + statics.numNonzeros)
    println("variance:" + statics.variance)
    // Covariance between columns.
    val covariance = rm.computeCovariance()
    println(covariance)
  }

  /**
    * IndexedRowMatrix demo: each row of the input file becomes an [[IndexedRow]]
    * carrying a distinct row index, then the matrix is converted to coordinate form.
    *
    * Fix: the original code used `vecs.size` (the vector length, identical for
    * every row) as the row index, collapsing all rows onto one index. The row
    * index must be unique per row, so we assign it with `zipWithIndex`.
    *
    * @param sc active SparkContext used to read the data file
    */
  def indexedRowMatrix(sc: SparkContext): Unit = {
    val data = sc.textFile("src/main/resources/data/rowMatrix.txt")
      .map(line => line.split(" ").map(_.toDouble))
      .map(Vectors.dense(_))
      .zipWithIndex() // assign a distinct 0-based index to each row
      .map { case (vec, idx) => new IndexedRow(idx, vec) }
    val indexedRowMatrix = new IndexedRowMatrix(data)
    println(indexedRowMatrix)
    // foreach returns Unit; print entries directly instead of println(Unit).
    indexedRowMatrix.rows.foreach(println)
    val coordinateMatrix = indexedRowMatrix.toCoordinateMatrix()
    coordinateMatrix.entries.foreach(println)
  }

  /**
    * Shows that MLlib's column variance is the *sample* variance (divides by
    * n - 1): the hand-computed sum of squared deviations for the first column
    * (mean 3.25) is printed raw, divided by n = 4 (population variance) and by
    * n - 1 = 3 (sample variance, matching computeColumnSummaryStatistics).
    */
  def varianceDemo(spark: SparkSession): Unit = {
    val source = spark.sparkContext.parallelize(Seq(
      Array(1.0, 2),
      Array(3.0, 6),
      Array(4.0, 2),
      Array(5.0, 2)
    ))
    val rowMatrix = new RowMatrix(source.map(row => Vectors.dense(row)))
    println(rowMatrix.computeCovariance())
    println(rowMatrix.computeColumnSummaryStatistics().variance)
    println(2.25 * 2.25 + 0.25 * 0.25 + 0.75 * 0.75 + 1.75 * 1.75)
    println((2.25 * 2.25 + 0.25 * 0.25 + 0.75 * 0.75 + 1.75 * 1.75) / 4)
    println((2.25 * 2.25 + 0.25 * 0.25 + 0.75 * 0.75 + 1.75 * 1.75) / 3)
  }

  /** Runs the distributed-matrix demos (RowMatrix, variance); IndexedRowMatrix demo is opt-in. */
  def distributedMatrix: Unit = {
    val sc = getContext
    rowMatrix(sc)
    /* indexedRowMatrix(sc) */
    varianceDemo(getSparkSession)
    /*
    Column 1 deviations from its mean 3.25:
    1,2  1-3.25
    3,6  3-3.25
    4,2  4-3.25
    5,2  5-3.25
     */
  }

  /**
    * Local (non-distributed) matrix demo: a 2x3 dense matrix (column-major
    * storage) and a 3x6 CSC sparse matrix (colPtrs / rowIndices / values).
    */
  def localMatrix = {
    val matrixDense = Matrices.dense(2, 3, Array(1.0, 2.0, 3.0, 4.0, 5.0, 3.0))
    println(matrixDense)
    val matrixSparse = Matrices.sparse(3, 6, Array(0, 0, 2, 2, 3, 6, 8), Array(0, 1, 2, 0, 1, 2, 0, 1), Array(1.0, 2, 3, 3, 4, 8, 4, 5))
    println(matrixSparse)
  }

  /** LabeledPoint demo: a positive example over a dense vector and a negative one over a sparse vector. */
  def labelPointDemo1 = {
    val dv = Vectors.dense(Array(3.0, 4, 7, 9, 2))
    val sv = Vectors.sparse(6, Seq((2, 4.0), (4, 19.8)))
    val lp_1 = LabeledPoint(1.0, dv)
    val lp_2 = LabeledPoint(0, sv)
    println("label:\t" + lp_1.label + "\tvalues:\t" + lp_1.features)
    println("label:\t" + lp_2.label + "\tvalues:\t" + lp_2.features)
  }

  /**
    * TODO: demonstrate round-tripping data between LIBSVM files and a DataFrame
    * (obtained from a Seq via implicit conversion) using MLUtils.
    */
  def labelPointDemo2 = {

  }

  def main(args: Array[String]): Unit = {
    //    vectorsDemo
    //    localMatrix
    //    labelPointDemo1
    //    labelPointDemo2
    distributedMatrix
  }

  /**
    * Vector construction demo: the varargs and Array forms of Vectors.dense are
    * equivalent, as are the (indices, values) and Seq[(index, value)] forms of
    * Vectors.sparse.
    */
  def vectorsDemo: Unit = {
    // val, not var: never reassigned.
    val denseWithoutArr = Vectors.dense(1, 3, 6, 8, 4)
    val denseWithArr = Vectors.dense(Array(1.0, 3, 6, 8, 4))
    val vectorsWithTwoArray = Vectors.sparse(6, Array(1, 3, 4, 5), Array(5, 6.0, 5, 5))
    val vectorsWithTuple = Vectors.sparse(6, Seq((1, 5.0), (3, 6.0), (4, 5.0), (5, 5.0)))
    assert(vectorsWithTuple == vectorsWithTwoArray)
    assert(denseWithoutArr == denseWithArr)
  }
}
