package com.king.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql._
import org.apache.spark.sql.expressions.{Aggregator, MutableAggregationBuffer, UserDefinedAggregateFunction}
import org.apache.spark.sql.types.{DataType, DoubleType, LongType, StructField, StructType}


object SparkSQL05_UDAF {

  /**
    * Demo entry point: builds a local SparkSession, loads `data/user.json`
    * into a temp view `user`, registers the custom average UDAF as `my_avg`,
    * and runs it over the `age` column.
    */
  def main(args: Array[String]): Unit = {
    // New entry point for Spark SQL: SparkSession

    val sparkConf: SparkConf = new SparkConf().setAppName("SparkSQL").setMaster("local[*]")

    val spark: SparkSession =
          SparkSession.builder()
            //.appName("SparkSQL")
            //.master("local[*]")
            .config(sparkConf)
            .getOrCreate()

    // Import implicits up front whether used or not (enables $"col", .toDF, etc.).
    import spark.implicits._

    val df: DataFrame = spark.read.json("data/user.json")
    df.createOrReplaceTempView("user")

    spark.udf.register("my_avg", new My_Avg_Aggregator )

    spark.sql("select my_avg(age) from user").show


    spark.stop()

  }

  /**
    * Custom UDAF (untyped / "weak-typed" API) computing the average of a Long column.
    *
    * Steps: 1. extend [[UserDefinedAggregateFunction]]  2. override its methods.
    *
    * NOTE(review): `UserDefinedAggregateFunction` is deprecated since Spark 3.0;
    * the recommended replacement is the already-imported typed [[Aggregator]]
    * registered via `functions.udaf`. Kept here because this lesson demonstrates
    * the untyped API.
    */
  class My_Avg_Aggregator  extends UserDefinedAggregateFunction{

    // Schema of the input: a single Long column (the age being averaged).
    override def inputSchema: StructType = {
      StructType(
        Array(
            StructField("age",LongType)
        )
      )
    }

    // Schema of the aggregation buffer: running sum (slot 0) and row count (slot 1).
    override def bufferSchema: StructType = {
      StructType(
        Array(
          StructField("totalAge",LongType),
          StructField("totalCount",LongType)
        )
      )
    }

    // Result type returned by evaluate: the average as a Double.
    override def dataType: DataType = DoubleType

    // Deterministic: same input rows always produce the same result.
    override def deterministic: Boolean = true

    // Initialize both buffer slots (sum and count) to zero.
    override def initialize(buffer: MutableAggregationBuffer): Unit = {
      buffer.update(0,0L)
      buffer.update(1,0L)
    }

    // Fold one input row into the buffer: add its age to the sum, bump the count.
    override def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
      buffer.update(0 , buffer.getLong(0) + input.getLong(0))
      buffer.update(1 , buffer.getLong(1) + 1 )
    }

    // Merge a partition's partial buffer into buffer1 (sums and counts add up).
    override def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
      buffer1.update(0, buffer1.getLong(0) + buffer2.getLong(0))
      buffer1.update(1, buffer1.getLong(1) + buffer2.getLong(1))
    }

    // Final result: sum / count as Double.
    // FIX: guard the empty-group case — previously 0L / 0.0 yielded Double.NaN;
    // returning null matches the semantics of Spark's built-in avg().
    override def evaluate(buffer: Row): Any = {
      val count = buffer.getLong(1)
      if (count == 0L) null
      else buffer.getLong(0) / count.toDouble
    }
  }

}
