package com.itcast.spark.basePro

import org.apache.spark.mllib.linalg
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession
import org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, Statistics}

/**
 * DESC: Reads a single-column numeric dataset (one value per line), wraps each
 * value in a dense MLlib vector, and prints column summary statistics
 * (count, non-zeros, max, min, mean, variance) via Statistics.colStats.
 */
object _01IrisVarience {

  /**
   * Entry point: loads `./datasets/mldata/iris-spaetal.csv` (expected to hold
   * one numeric value per line), converts each line to a one-element dense
   * vector, and prints column-wise summary statistics.
   *
   * The expected console output for the bundled 150-row iris sepal-length
   * column is shown in the comment at the end of the method.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("_01IrisVarience").setMaster("local[*]")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")
    try {
      // Read the data: one double per line -> one-element dense vector per record.
      // Blank/whitespace-only lines are skipped so that `toDouble` cannot throw
      // a NumberFormatException on an empty string.
      val dataRDD: RDD[linalg.Vector] = sc.textFile("./datasets/mldata/iris-spaetal.csv")
        .map(_.trim)
        .filter(_.nonEmpty)
        .map(line => Vectors.dense(line.toDouble))
      // Compute column-wise summary statistics over the RDD of vectors.
      val summary: MultivariateStatisticalSummary = Statistics.colStats(dataRDD)
      println("count is:",summary.count)
      println("nonzoreos is:",summary.numNonzeros)
      println("max is:",summary.max)
      println("min is:",summary.min)
      println("mean is:",summary.mean)
      println("varience is:",summary.variance)
      /*  (count is:,150)
          (nonzoreos is:,[150.0])
          (max is:,[7.9])
          (min is:,[4.3])
          (mean is:,[5.843333333333333])
          (varience is:,[0.6856935123042512])*/
    } finally {
      // Always release the local Spark resources, even if the job fails.
      spark.stop()
    }
  }
}
