package com.mjf.spark.day08

import org.apache.spark.SparkConf
import org.apache.spark.sql.expressions.{MutableAggregationBuffer, UserDefinedAggregateFunction}
import org.apache.spark.sql.types.{DataType, DoubleType, IntegerType, LongType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
 * 自定义UDAF(弱类型  主要应用在SQL风格的DF查询)
 */
object SparkSQL05_UDAF {

  /**
   * Demo entry point: registers the custom untyped UDAF `myAvg` and runs it
   * in a SQL query over a JSON-backed temporary view.
   *
   * @param args optional; args(0) overrides the input JSON path
   *             (defaults to the original hard-coded location).
   */
  def main(args: Array[String]): Unit = {

    // Spark configuration: run locally using all available cores.
    val conf: SparkConf = new SparkConf().setAppName("SparkSQL").setMaster("local[*]")

    // SparkSession is the single entry point for Spark SQL.
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    // Input path: first CLI argument if given, otherwise the original default,
    // so existing invocations keep working unchanged.
    val inputPath: String = args.headOption.getOrElse("D:\\code\\spark\\input\\test.json")

    // Read the JSON file into a DataFrame.
    val df: DataFrame = spark.read.json(inputPath)

    // Register the custom UDAF under the name used in the SQL text below.
    spark.udf.register("myAvg", new myAvg)

    // Expose the DataFrame as a temporary view so it can be queried with SQL.
    df.createOrReplaceTempView("user")

    // Built-in equivalent kept for comparison:
    // spark.sql("select avg(age) from user").show()
    spark.sql("select myAvg(age) from user").show()

    // Release cluster resources.
    spark.close()

  }
}

// 自定义UDAF函数（弱类型）
/**
 * Custom average UDAF (untyped API, for SQL-style DataFrame queries).
 * Computes the average of an integer column, skipping null inputs —
 * matching the semantics of the built-in `avg`.
 *
 * NOTE(review): `UserDefinedAggregateFunction` is deprecated since Spark 3.0;
 * new code should use `functions.udaf` with an `Aggregator`.
 */
class myAvg extends UserDefinedAggregateFunction {

  // Input schema: a single integer column (the value being averaged).
  override def inputSchema: StructType = {
    StructType(Array(StructField("age", IntegerType)))
  }

  // Buffer schema: running sum and row count. Long (not Int) so the sum
  // cannot overflow when aggregating many rows.
  override def bufferSchema: StructType = {
    StructType(Array(StructField("sum", LongType), StructField("count", LongType)))
  }

  // A UDAF produces exactly one output value; ours is a Double average.
  override def dataType: DataType = DoubleType

  // Same input always yields the same output, so the function is deterministic.
  override def deterministic: Boolean = true

  // Reset the buffer to its initial state: zero sum, zero count.
  override def initialize(buffer: MutableAggregationBuffer): Unit = {
    buffer(0) = 0L   // running sum of ages
    buffer(1) = 0L   // number of non-null rows seen
  }

  // Fold one input row into the partition-local buffer.
  override def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
    // BUG FIX: the original checked buffer.isNullAt(0), which is never null
    // after initialize(); the null check must be on the INPUT row so that
    // null ages are skipped instead of being counted.
    if (!input.isNullAt(0)) {
      buffer(0) = buffer.getLong(0) + input.getInt(0)
      buffer(1) = buffer.getLong(1) + 1L
    }
  }

  // Merge two partition buffers: sums and counts are both additive.
  override def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
    buffer1(0) = buffer1.getLong(0) + buffer2.getLong(0)
    buffer1(1) = buffer1.getLong(1) + buffer2.getLong(1)
  }

  // Final result: sum / count. Return null (like the built-in avg) rather
  // than NaN when no non-null rows were aggregated.
  override def evaluate(buffer: Row): Any = {
    val count = buffer.getLong(1)
    if (count == 0L) null else buffer.getLong(0).toDouble / count
  }
}