package com.atbeijing.bigdata.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.expressions.Aggregator
import org.apache.spark.sql.{Encoder, Encoders, SparkSession, functions}

object SparkSQL07_UDAF_Class {

    def main(args: Array[String]): Unit = {

        // Build the local Spark execution environment.
        val conf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL")
        val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
        import spark.implicits._

        // Sample user data as an RDD of (id, name, age) tuples.
        val userRDD = spark.sparkContext.makeRDD(
            List(
                (1, "zhangsan", 30),
                (2, "lisi", 40),
                (3, "wangwu", 50)
            )
        )

        // Expose the data to SQL as a temporary view named "user".
        userRDD.toDF("id", "name", "age").createOrReplaceTempView("user")

        // Register the strongly typed aggregator as a SQL-callable UDAF.
        // Early Spark versions only supported untyped aggregate functions in
        // SQL; since Spark 3.0, functions.udaf adapts a typed Aggregator so
        // it can be invoked from a SQL statement.
        spark.udf.register("ageAvg", functions.udaf(new MyAgeAvgUDAF()))

        spark.sql("select ageAvg(age) from user").show()

        spark.stop()
    }
    // Aggregation buffer: running sum of ages (total) and number of rows seen (cnt).
    // The fields are deliberately `var`s: reduce/merge update the buffer in place,
    // the usual pattern for Spark Aggregator buffers (avoids allocating a new
    // buffer per input row).
    case class AgeBuffer( var total:Int, var cnt:Int )
    // Custom user-defined aggregate function (UDAF) - strongly typed.
    // 1. Extend Aggregator
    // 2. Supply the type parameters:
    //    IN  : Int       - one age value per input row
    //    BUF : AgeBuffer - running sum + count
    //    OUT : Int       - truncated integer average
    // 3. Override the required methods
    class MyAgeAvgUDAF extends Aggregator[Int, AgeBuffer, Int]{
        // Initial (zero) value of the aggregation buffer.
        override def zero: AgeBuffer = {
            AgeBuffer(0, 0)
        }

        // Fold one input age into the buffer (buffer is mutated in place).
        // NOTE(review): total is an Int; summing very many / very large ages
        // could overflow — consider a Long buffer if inputs grow.
        override def reduce(buf: AgeBuffer, input: Int): AgeBuffer = {
            buf.total += input
            buf.cnt += 1
            buf
        }

        // Merge two partial buffers produced on different partitions.
        override def merge(b1: AgeBuffer, b2: AgeBuffer): AgeBuffer = {
            b1.total += b2.total
            b1.cnt += b2.cnt
            b1
        }

        // Produce the final result: the integer (truncating) average.
        // Guard against an empty group: the original divided by zero when
        // cnt == 0 (e.g. aggregating over an empty table); return 0 instead.
        override def finish(buf: AgeBuffer): Int = {
            if (buf.cnt == 0) 0 else buf.total / buf.cnt
        }

        // Encoder for the intermediate buffer (case class -> product encoder).
        override def bufferEncoder: Encoder[AgeBuffer] = Encoders.product

        // Encoder for the Int output.
        override def outputEncoder: Encoder[Int] = Encoders.scalaInt
    }
}
