package com.mjf.spark.day08

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * 求平均年龄---RDD算子方式实现
 */
/**
 * Computes the average age of a small in-memory dataset using plain RDD
 * operators (contrast with the Spark SQL / DataFrame approach).
 *
 * Strategy: map every (name, age) record to (age, 1), then reduce to a
 * single (ageSum, recordCount) pair, and divide.
 */
object SparkSQL03_RDD {
  def main(args: Array[String]): Unit = {

    // Build the Spark configuration; run locally on all available cores.
    // Fix: give the application a real name (was "") so it is identifiable
    // in the Spark UI and event logs.
    val conf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL03_RDD")
    // Create the SparkContext (driver-side entry point).
    val sc = new SparkContext(conf)

    try {
      // Sample (name, age) records.
      val rdd: RDD[(String, Int)] = sc.makeRDD(List(("lucy", 20), ("tina", 30), ("jack", 40)))

      // Map each record to (age, 1) so a single reduce yields both the
      // age sum and the record count in one pass.
      val mapRDD: RDD[(Int, Int)] = rdd.map { case (_, age) => (age, 1) }

      // Aggregate element-wise into (ageSum, countSum).
      val res: (Int, Int) = mapRDD.reduce { (t1, t2) =>
        (t1._1 + t2._1, t1._2 + t2._2)
      }

      // Fix: use floating-point division — the original integer division
      // truncated the average (e.g. ages 20 and 25 would print 22, not 22.5).
      println(res._1.toDouble / res._2)
    } finally {
      // Fix: always release the SparkContext, even when the job throws
      // (the original called stop() unconditionally outside any guard).
      sc.stop()
    }
  }
}
