package com.gt.sql

import com.gt.SCUtil
import com.gt.sql.SparkDataFrame_05_UDAF.myUDAF
import org.apache.spark.sql.expressions.{Aggregator, MutableAggregationBuffer, UserDefinedAggregateFunction}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Dataset, Encoder, Encoders, Row, SparkSession, TypedColumn, functions}

/**
 * Demonstrates a custom typed aggregate function (Aggregator) that computes
 * the average user age, applied to a Dataset through the DSL API.
 * Note: `import spark.implicits._` is required for the Encoder derivation.
 */
object SparkDataFrame_05_UDAF {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SCUtil.buildLocalSparkSession()
    // Needed for the implicit Encoder[User] used by df.as[User].
    import spark.implicits._

    val df: DataFrame = spark.read.json("data/user.json")
    val ds: Dataset[User] = df.as[User]

    // Spark 3.x alternative: wrap the Aggregator with functions.udaf and use it in SQL.
    //spark.udf.register("avgAge",functions.udaf(new myUDAF()))
    //ds.createOrReplaceTempView("user")
    //val resultDF: DataFrame = spark.sql("select avgAge(name,age) as avgAge from user")
    //resultDF.show()

    // Earlier Spark versions did not support typed Aggregators in SQL,
    // so the aggregation is applied as a TypedColumn via the Dataset DSL.
    val udafCol: TypedColumn[User, Long] = new myUDAF().toColumn
    val resultDF: Dataset[Long] = ds.select(udafCol)
    resultDF.show()

    spark.close()
  }

  /** Input record: one user parsed from data/user.json. */
  final case class User(name: String, age: Long)

  /** Mutable aggregation buffer: running sum of ages and record count. */
  final case class UserBuff(var total: Long, var count: Long)

  /**
   * Custom aggregate function: average user age (integer division).
   * Extends org.apache.spark.sql.expressions.Aggregator with type parameters:
   *   IN   = User(name: String, age: Long)
   *   BUFF = UserBuff(var total: Long, var count: Long)
   *   OUT  = Long
   */
  class myUDAF extends Aggregator[User, UserBuff, Long] {
    /** Initial (zero) value of the aggregation buffer. */
    override def zero: UserBuff = UserBuff(0L, 0L)

    /** Folds one input row into the buffer (within a partition). */
    override def reduce(b: UserBuff, a: User): UserBuff = {
      b.total += a.age
      b.count += 1
      b
    }

    /** Merges two partial buffers (across partitions). */
    override def merge(b1: UserBuff, b2: UserBuff): UserBuff = {
      b1.total += b2.total
      b1.count += b2.count
      b1
    }

    /**
     * Produces the final result from the buffer.
     * Guards against division by zero so an empty Dataset yields 0
     * instead of throwing ArithmeticException.
     */
    override def finish(reduction: UserBuff): Long =
      if (reduction.count == 0L) 0L else reduction.total / reduction.count

    /** Encoder for the buffer; product case classes use Encoders.product. */
    override def bufferEncoder: Encoder[UserBuff] = Encoders.product

    /** Encoder for the Long output value. */
    override def outputEncoder: Encoder[Long] = Encoders.scalaLong
  }

}
