package com.bw.spark02.rdd.action

import org.apache.spark.{SparkConf, SparkContext}


/**
 * Demonstrates the `histogram(bucketCount)` action on an RDD[Double].
 *
 * `histogram(n)` returns a pair `(buckets, counts)` where `buckets` holds the
 * n+1 evenly-spaced bucket boundaries spanning [min, max] of the data, and
 * `counts` holds the n element counts per bucket — so `buckets` is always one
 * element longer than `counts`.
 */
object RDD_21_histogram {
    
    def main(args: Array[String]): Unit = {
        
        // Initialize the Spark entry point (local single-threaded master).
        val sparkConf = new SparkConf()
        sparkConf.setMaster("local")
        sparkConf.setAppName("RDD_Test")
        val sc = new SparkContext(sparkConf)
        
        try {
            /**
             * Example 1: 5 evenly-spaced buckets over the data range.
             */
            val dataRDD = sc.parallelize(List(1.0, 1.2, 1.3, 2.0, 2.1, 7.4, 7.5, 7.6, 8.8, 9.0, 11), 3)
            val resultValue: (Array[Double], Array[Long]) = dataRDD.histogram(5)
            
            // Boundaries array is one longer than the counts array (6 vs 5 here).
            println(resultValue._1.length, resultValue._2.length)
            
            // Print each bucket's lower boundary alongside its count.
            for (index <- resultValue._2.indices) {
                println(resultValue._1(index) + "\t" + resultValue._2(index))
            }
            
            println("-------------------------------------------------")
            /**
             * Example 2: 6 buckets over a different, unsorted data set.
             */
            val dataRDD2 = sc.parallelize(List(9.1, 1.0, 1.2, 2.1, 1.3, 5.0, 2.0, 2.1, 7.4, 7.5, 7.6, 8.8, 10.0, 8.9,
                5.5), 3)
            val resultValue1: (Array[Double], Array[Long]) = dataRDD2.histogram(6)
            
            for (index <- resultValue1._2.indices) {
                println(resultValue1._1(index) + "\t" + resultValue1._2(index))
            }
        } finally {
            // Always release the SparkContext; the original leaked it on exit.
            sc.stop()
        }
        
    }
}
