package scala.org.zjt.spark

import org.apache.spark.{SparkConf, SparkContext}

/**
  * DESC: compute an average using `aggregate`:
  *   aggregate((0, 0))(
  *     (acc, element) => (acc._1 + element, acc._2 + 1),  // fold element into per-partition (sum, count)
  *     (a, b) => (a._1 + b._1, a._2 + b._2))              // merge per-partition (sum, count) pairs
  **/
object AvgSpark {

  /**
    * Entry point. Computes the mean of a list of ints with `aggregate`
    * and the sum of a filtered list with `reduce`, printing both.
    *
    * NOTE: converted from `extends App` to an explicit `main` — the `App`
    * trait's delayed initialization has well-known ordering pitfalls and
    * is discouraged for entry points. `AvgSpark.main` remains the JVM
    * entry point, so launch commands are unchanged.
    */
  def main(args: Array[String]): Unit = {
    // `val`, not `var`: the configuration is never reassigned.
    val sparkConf = new SparkConf().setMaster("local").setAppName("avg-spark")
    val sc = new SparkContext(sparkConf)

    try {
      // aggregate(zero)(seqOp, combOp):
      //   zero   = (runningSum, count), starting at (0, 0)
      //   seqOp  = folds each element into the per-partition (sum, count)
      //   combOp = merges (sum, count) pairs across partitions
      val result = sc.parallelize(List(1, 2, 4, 43, 2, 3, 3, 43))
        .aggregate((0, 0))(
          (acc, element) => (acc._1 + element, acc._2 + 1),
          (a, b) => (a._1 + b._1, a._2 + b._2)
        )

      println(result.toString)
      // Bug fix: the original printed `result._1 / result._2 * 0.1`,
      // i.e. integer division (101 / 8 == 12) scaled by 0.1, yielding 1.2 —
      // not the mean. Convert to Double before dividing: 101.0 / 8 == 12.625.
      println(result._1.toDouble / result._2)

      // Keep elements strictly between 2 and 40 (both predicates from the
      // original, fused into one filter), then sum them.
      val sum = sc.parallelize(List(1, 2, 4, 43, 30, 3, 3, 43))
        .filter(a => a > 2 && a < 40)
        .reduce(_ + _)
      println(sum)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}
