package com.atguigu.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.expressions.{MutableAggregationBuffer, UserDefinedAggregateFunction}
import org.apache.spark.sql.types.{DataType, DoubleType, LongType, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
 * 自定义聚合函数
 */
object TestSparkSQL_UDAF {

  /**
   * Entry point: registers the custom average-age UDAF and runs it over a
   * JSON dataset containing an `age` column.
   *
   * @param args optional: args(0) is the path to the input JSON file;
   *             defaults to the original sample path when not supplied,
   *             so existing invocations keep working.
   */
  def main(args: Array[String]): Unit = {

    // local[*] uses all available cores on the local machine.
    val conf: SparkConf = new SparkConf().setAppName("Spark SQL UDAF").setMaster("local[*]")

    // Build (or reuse) the SparkSession from the configuration above.
    val sparkSession: SparkSession = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()

    // Instantiate and register the custom aggregate function under the
    // SQL name "ageAvg" so it can be used in SQL text below.
    val ageAvgUDAF: AgeAvgUDAF = new AgeAvgUDAF
    sparkSession.udf.register("ageAvg", ageAvgUDAF)

    // Input path is configurable; falls back to the original hard-coded sample file.
    val inputPath: String = args.headOption.getOrElse("E:\\user.json")
    val dataFrame: DataFrame = sparkSession.read.json(inputPath)

    // Expose the DataFrame as a temporary SQL view named "user".
    dataFrame.createOrReplaceTempView("user")

    // Run the UDAF over the age column and print the result.
    sparkSession.sql("select ageAvg(age) from user").show()

    sparkSession.stop()

  }

}

/**
 * 自定义年龄平均值聚合函数
 */
/**
 * Custom aggregate function computing the average of a LongType `age` column.
 *
 * Aggregation buffer layout: slot 0 = running sum of ages, slot 1 = row count.
 * Result: sum / count as a Double, or null when no rows were aggregated.
 *
 * NOTE: UserDefinedAggregateFunction is deprecated since Spark 3.0 in favour
 * of org.apache.spark.sql.functions.udaf with a typed Aggregator.
 */
class AgeAvgUDAF extends UserDefinedAggregateFunction {

  /**
   * Input schema: the function is invoked as ageAvg(age) with a single
   * argument, so exactly ONE field is declared here.
   * (Bug fix: previously declared two fields, which made Spark expect two
   * call-site arguments.)
   *
   * @return schema of the single long-valued input column
   */
  override def inputSchema: StructType = {
    new StructType().add("age", LongType)
  }

  /**
   * Buffer schema: two slots are read/written by initialize/update/merge,
   * so two fields must be declared — sum at index 0, count at index 1.
   * (Bug fix: previously declared only one field while the code used two.)
   *
   * @return schema of the intermediate aggregation buffer
   */
  override def bufferSchema: StructType = {
    new StructType().add("sum", LongType).add("count", LongType)
  }

  /**
   * Result type of the aggregation.
   *
   * @return DoubleType — the average as a floating-point value
   */
  override def dataType: DataType = DoubleType

  /**
   * Whether the function always returns the same result for the same input.
   *
   * @return true — the average is fully determined by its inputs
   */
  override def deterministic: Boolean = true

  /**
   * Initializes both buffer slots (sum and count) to zero.
   *
   * @param buffer mutable aggregation buffer to reset
   */
  override def initialize(buffer: MutableAggregationBuffer): Unit = {
    buffer(0) = 0L
    buffer(1) = 0L
  }

  /**
   * Folds one input row into the buffer: adds the age to the sum and
   * increments the count.
   *
   * @param buffer mutable aggregation buffer (sum, count)
   * @param input  input row holding the single `age` value
   */
  override def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
    // Skip nulls so a null age neither corrupts the sum nor inflates the count.
    if (!input.isNullAt(0)) {
      buffer(0) = buffer.getLong(0) + input.getLong(0)
      buffer(1) = buffer.getLong(1) + 1
    }
  }

  /**
   * Merges two partial buffers (e.g. from different partitions) by adding
   * sums to sums and counts to counts.
   * (Bug fix: the count slot previously accumulated buffer2's SUM —
   * getLong(0) — instead of its count, yielding a wrong average.)
   *
   * @param buffer1 destination buffer, updated in place
   * @param buffer2 source buffer to merge in
   */
  override def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
    buffer1(0) = buffer1.getLong(0) + buffer2.getLong(0)
    buffer1(1) = buffer1.getLong(1) + buffer2.getLong(1)
  }

  /**
   * Produces the final result: sum / count as a Double.
   *
   * @param buffer final aggregation buffer (sum, count)
   * @return the average age, or null when no rows were aggregated
   *         (avoids returning Infinity/NaN from a division by zero)
   */
  override def evaluate(buffer: Row): Any = {
    val count = buffer.getLong(1)
    if (count == 0L) null
    else buffer.getLong(0).toDouble / count
  }
}
