package com.bigdata.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.sql.expressions.{MutableAggregationBuffer, UserDefinedAggregateFunction}
import org.apache.spark.sql.types.{DataType, LongType, StructField, StructType}

object SparkSQL_Avg_Age_1 {

    /**
      * Computes the average age of the users in `datas/user.json` with plain
      * RDD transformations: map each row to (age, 1), fold to (ageSum, count),
      * then divide. Prints the average (or a notice when the file has no rows).
      */
    def main(args: Array[String]): Unit = {

        // TODO compute the average age
        val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("sparkSQL")
        val spark: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()

        // Ensure the session is stopped even if reading/aggregating throws.
        try {
            val df: DataFrame = spark.read.json("datas/user.json")
            val rdd: RDD[Row] = df.rdd

            // Pair each age with a count of 1 so one aggregation yields (sum, count).
            // NOTE(review): getLong(0) assumes "age" is the first column of the
            // inferred JSON schema — confirm against datas/user.json.
            val ageRDD: RDD[(Long, Int)] = rdd.map(
                row => {
                    (row.getLong(0), 1)
                }
            )

            // fold is total on an empty RDD (reduce would throw
            // UnsupportedOperationException); (0L, 0) is the neutral element.
            val (ageSum, userCount) = ageRDD.fold((0L, 0))(
                (t1, t2) => {
                    (t1._1 + t2._1, t1._2 + t2._2)
                }
            )

            if (userCount == 0) {
                println("no user records found")
            } else {
                // Floating-point division: Long division truncated the result
                // (e.g. sum 59 / count 2 gave 29 instead of 29.5).
                val res: Double = ageSum.toDouble / userCount
                println(res)
            }
        } finally {
            spark.stop()
        }
    }
}
