package com.offcn.bigdata.sql.p2

import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.expressions.{MutableAggregationBuffer, UserDefinedAggregateFunction}
import org.apache.spark.sql.types.{DataType, DataTypes, StructField, StructType}

/**
  * Registers a hand-written average UDAF and compares its result with the
  * built-in avg over the same grouped data.
  */
object UDAFFunctionOps {
    def main(args: Array[String]): Unit = {
        // Local SparkSession for the demo.
        val session = SparkSession.builder()
            .appName("UDAFFunctionOps")
            .master("local[*]")
            .getOrCreate()

        // step 2: make the custom aggregate callable from SQL as "myAvg"
        session.udf.register("myAvg", new MyAvg)

        // Load the sample data and expose it as a temporary view.
        session.read
            .json("file:/E:/data/spark/sql/people.json")
            .createOrReplaceTempView("person")

        // Compute both the built-in and the custom average per province
        // so the two can be compared side by side.
        val query =
            """
              |select
              |  province,
              |  round(avg(height), 1) avg_height,
              |  round(myAvg(height), 1) my_avg_height
              |from person
              |group by province
            """.stripMargin
        session.sql(query).show()
        session.stop()
    }
}
// step 1. Implement the aggregation by extending UserDefinedAggregateFunction.
// NOTE(review): this API is deprecated since Spark 3.0 in favour of
// org.apache.spark.sql.expressions.Aggregator; kept here for the exercise.
/**
  * Custom average aggregate: accumulates a running (sum, count) pair per
  * partition and divides at the end. Null inputs are ignored, mirroring the
  * semantics of the built-in avg.
  */
class MyAvg extends UserDefinedAggregateFunction {
    // Input: a single double column. Marked nullable because JSON data can
    // contain missing heights; nulls are skipped in update() below.
    override def inputSchema: StructType = StructType(
        Array(
            StructField("height", DataTypes.DoubleType, true)
        )
    )

    // Intermediate state: running sum and row count. LongType for the count
    // avoids overflow on very large groups (IntegerType caps at ~2.1B rows).
    override def bufferSchema: StructType = StructType(
        Array(
            StructField("sum", DataTypes.DoubleType, false),
            StructField("count", DataTypes.LongType, false)
        )
    )

    // Final result type of the aggregate.
    override def dataType: DataType = DataTypes.DoubleType

    // Same input always yields the same output.
    override def deterministic: Boolean = true

    // Final result: sum / count. A group whose heights were all null ends
    // with count == 0; return NaN explicitly rather than dividing by zero
    // (with GROUP BY every group has at least one row, so this is a guard).
    override def evaluate(buffer: Row): Double = {
        val count = buffer.getLong(1)
        if (count == 0L) Double.NaN else buffer.getDouble(0) / count
    }

    // Per-partition initialization: sum = 0.0, count = 0.
    override def initialize(buffer: MutableAggregationBuffer): Unit = {
        buffer.update(0, 0.0d)
        buffer.update(1, 0L)
    }

    // Per-partition accumulation. Null heights are skipped so the result
    // matches the built-in avg (the original getDouble on a null field
    // would throw a NullPointerException).
    override def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
        if (!input.isNullAt(0)) {
            buffer.update(0, buffer.getDouble(0) + input.getDouble(0))
            buffer.update(1, buffer.getLong(1) + 1L)
        }
    }

    // Cross-partition merge: add the partial sums and counts together.
    override def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
        buffer1.update(0, buffer1.getDouble(0) + buffer2.getDouble(0))
        buffer1.update(1, buffer1.getLong(1) + buffer2.getLong(1))
    }
}
