package com.bjsxt.test

/**
 * Spark MLlib data-type examples: local dense/sparse vectors, labeled points,
 * local and distributed matrices, and column summary statistics.
 * Created 2015/4/30.
 */

import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.mllib.linalg.distributed.RowMatrix
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.mllib.linalg.{Matrix, Matrices}
import org.apache.spark.mllib.linalg.distributed.{BlockMatrix, CoordinateMatrix, MatrixEntry}
import org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, Statistics}


object DataTypes {

  /**
   * Walks through the basic Spark MLlib data types. Only the
   * column-summary-statistics section at the bottom is currently active;
   * the earlier sections (local vectors, libSVM loading, local matrices,
   * distributed block/row matrices) are kept as commented reference code.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    /*
    // Local Vector
    // Create a dense vector (1.0, 0.0, 3.0).
    val dv: Vector = Vectors.dense(1.0, 0.0, 3.0)
    // Create a sparse vector (1.0, 0.0, 3.0) by specifying its indices and values corresponding to nonzero entries.
    val sv1: Vector = Vectors.sparse(3, Array(0, 2), Array(1.0, 3.0))
    // Create a sparse vector (1.0, 0.0, 3.0) by specifying its nonzero entries.
    val sv2: Vector = Vectors.sparse(3, Seq((0, 1.0), (2, 3.0)))
    println(dv)
    println(sv1)
    println(sv2)

*/
    // Labeled Point: a (label, feature-vector) pair used by supervised learners.
    // These two values are demonstration-only and intentionally unused below.
    // Create a labeled point with a positive label and a dense feature vector.
    val pos = LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0))
    // Create a labeled point with a negative label and a sparse feature vector.
    val neg = LabeledPoint(0.0, Vectors.sparse(3, Array(0, 2), Array(1.0, 3.0)))
    //    println(pos)
    //    println(neg)

    // MLUtils.loadLibSVMFile reads training examples stored in LIBSVM format.
    //        val conf = new SparkConf().setAppName("Data Types").setMaster("local")
    //        val sc = new SparkContext(conf)
    //        val examples: RDD[LabeledPoint] = MLUtils.loadLibSVMFile(sc, "sample_libsvm_data.txt")
    //        examples.foreach(println)
    //        println(examples.count())


    // Local Matrix
    // Create a dense matrix ((1.0, 2.0), (3.0, 4.0), (5.0, 6.0)) — column-major storage.
    //        val dm: Matrix = Matrices.dense(3, 2, Array(1.0, 3.0, 5.0, 2.0, 4.0, 6.0))
    //        println(dm)


    /*
            // Distributed Matrix
            // Block Matrix
            val conf = new SparkConf().setAppName("Data Types").setMaster("local")
            val sc = new SparkContext(conf)
            val entries: RDD[MatrixEntry] = sc.textFile("data/mllib/latest5.txt").map(_.split(' ')).map(_.map(_.toDouble)).map(m => (m(0).toLong,m(1).toLong,m(2))).map(k => new MatrixEntry(k._1,k._2,k._3)) // an RDD of (i, j, v) matrix entries
            entries.foreach(println)
            // Create a CoordinateMatrix from an RDD[MatrixEntry].
            val coordMat: CoordinateMatrix = new CoordinateMatrix(entries)
            println(coordMat)
            // Transform the CoordinateMatrix to a BlockMatrix
            val matA: BlockMatrix = coordMat.toBlockMatrix().cache()
            // Validate whether the BlockMatrix is set up properly. Throws an Exception when it is not valid.
            // Nothing happens if it is valid.
            matA.validate()
            println(matA)
            // Calculate A^T A.
            val ata = matA.transpose.multiply(matA)
            println(ata)
    */
    /*

        // RowMatrix
        val conf = new SparkConf().setAppName("Data Types").setMaster("local")
        val sc = new SparkContext(conf)
  val rows: RDD[Vector] = sc.textFile("data.txt").map{ line =>
    val values = line.split(' ').map(_.toDouble)
//      Vectors.sparse(values.length, values.zipWithIndex.map(e => (e._2, e._1)).filter(_._2 != 0.0))
      Vectors.dense(values)
        } // an RDD of local vectors
        rows.foreach(println)
        // Create a RowMatrix from an RDD[Vector].
        val mat: RowMatrix = new RowMatrix(rows)
        // Get its size.
        val m = mat.numRows()
        val n = mat.numCols()
        print(m,n)
    */

    // Active example: per-column summary statistics over an RDD of vectors.
    val conf = new SparkConf().setAppName("Data Types").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Parse each line of "data.txt" (space-separated doubles) into a sparse
      // vector, keeping only the nonzero entries as (index, value) pairs.
      val observations: RDD[Vector] = sc.textFile("data.txt").map { line =>
        val values = line.split(' ').map(_.toDouble)
        Vectors.sparse(values.length, values.zipWithIndex.map(e => (e._2, e._1)).filter(_._2 != 0.0))
        //Vectors.dense(values)
      } // an RDD of local vectors
      // Compute column summary statistics (mean, variance, nonzero counts).
      val summary: MultivariateStatisticalSummary = Statistics.colStats(observations)
      println(summary.mean)
      println(summary.variance)
      println(summary.numNonzeros)
    } finally {
      // Always release the local Spark resources, even if the job fails.
      sc.stop()
    }
  }
}
