package com.ada.spark.sparksql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.SparkConf
import org.apache.spark.sql.expressions.Aggregator

/**
  * Both the strongly-typed Dataset and the weakly-typed DataFrame provide built-in
  * aggregate functions such as count(), countDistinct(), avg(), max(), min().
  * Beyond those, users can register their own custom aggregate functions (UDAF),
  * demonstrated here with a typed [[Aggregator]] computing the average age.
  */
object SparkSql07_UDAF {

    def main(args: Array[String]): Unit = {

        // Build the Spark configuration (local mode, all cores).
        val conf: SparkConf = new SparkConf().setAppName("SparkSql07_UDAF").setMaster("local[*]")

        // Create the SparkSession entry point.
        val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

        // Implicit conversions (e.g. .toDS) must be imported from the session instance.
        // "spark" here is the name of the SparkSession value, not a package.
        import spark.implicits._

        // Create the source RDD of (id, name, age) tuples.
        val rdd: RDD[(Int, String, Int)] = spark.sparkContext.makeRDD(List((1, "Jack", 20), (2, "Bob", 22), (3, "Lucy", 28)))

        // RDD -> Dataset: first map each tuple onto the UserBean case class.
        val userRDD: RDD[UserBean] = rdd.map {
            case (id, name, age) => UserBean(id, name, age)
        }

        val userDS: Dataset[UserBean] = userRDD.toDS()

        // Alternative route via DataFrame:
        // val frame: DataFrame = userRDD.toDF()
        // val userDS: Dataset[UserBean] = frame.as[UserBean]

        // Instantiate the typed aggregate function.
        val udaf = new MyAverageFunc

        // Convert the aggregator into a typed query column.
        val avgCol: TypedColumn[UserBean, Double] = udaf.toColumn.name("avgAge")

        // Apply the aggregation over the Dataset and print the result.
        userDS.select(avgCol).show()

        // Release resources.
        spark.stop()

    }

}

// Row type for the example Dataset: user id, name, and age (age is the field being averaged).
case class UserBean(id: Int, name: String, age: Int)

// Mutable accumulation buffer for MyAverageFunc: running sum of ages plus element count.
// NOTE(review): sum is Int, so a very large dataset could overflow — consider Long.
case class AvgBuffer(var sum: Int, var count: Int)

/**
  * Custom strongly-typed aggregate function (UDAF) that computes the average age
  * of [[UserBean]] rows.
  *
  * Type parameters of [[Aggregator]]: input row type (UserBean),
  * intermediate buffer type (AvgBuffer), final output type (Double).
  */
class MyAverageFunc extends Aggregator[UserBean, AvgBuffer, Double] {

    /** Zero value for the aggregation: empty sum, zero count. */
    override def zero: AvgBuffer = AvgBuffer(0, 0)

    /** Fold one input row into the buffer (in-place mutation is permitted for buffers). */
    override def reduce(b: AvgBuffer, a: UserBean): AvgBuffer = {
        b.sum += a.age
        b.count += 1
        b
    }

    /** Merge two partial buffers produced on different partitions. */
    override def merge(b1: AvgBuffer, b2: AvgBuffer): AvgBuffer = {
        b1.sum += b2.sum
        b1.count += b2.count
        b1
    }

    /**
      * Produce the final average. Guards the empty-input case explicitly:
      * without the guard, 0.toDouble / 0 would silently yield NaN.
      */
    override def finish(reduction: AvgBuffer): Double =
        if (reduction.count == 0) 0.0
        else reduction.sum.toDouble / reduction.count

    /** Encoder for the intermediate buffer type (a case class, hence a product encoder). */
    override def bufferEncoder: Encoder[AvgBuffer] = Encoders.product

    /** Encoder for the Double output. */
    override def outputEncoder: Encoder[Double] = Encoders.scalaDouble
}


