package sparkcore.day7.lesson09

import org.apache.spark.sql.Row
import org.apache.spark.sql.expressions.{MutableAggregationBuffer, UserDefinedAggregateFunction}
import org.apache.spark.sql.types._

/**
 * Example UDAF that computes the arithmetic mean of a Double `salary` column.
 *
 * Buffer layout: (total: Double, count: Long). `count` is a Long (was Int)
 * so the aggregate cannot overflow on inputs with more than Int.MaxValue rows.
 *
 * NOTE(review): `UserDefinedAggregateFunction` is deprecated since Spark 3.0;
 * prefer `org.apache.spark.sql.expressions.Aggregator` for new code.
 */
object UDAFDemo extends UserDefinedAggregateFunction {

  /** Input: a single nullable Double column (the salary to average). */
  override def inputSchema: StructType = StructType(
    StructField("salary", DoubleType, true) :: Nil
  )

  /** Result type: the average, as a Double. */
  override def dataType: DataType = DoubleType

  /** Intermediate state: running sum and number of non-null rows seen. */
  override def bufferSchema: StructType = StructType(
    StructField("total", DoubleType, true) ::
      StructField("count", LongType, true) ::
      Nil
  )

  /** Same input rows always produce the same result. */
  override def deterministic: Boolean = true

  /** Start each partition's buffer at sum = 0.0, count = 0. */
  override def initialize(buffer: MutableAggregationBuffer): Unit = {
    buffer.update(0, 0.0)
    buffer.update(1, 0L)
  }

  /**
   * Fold one input row into the buffer.
   *
   * Null salaries are skipped (the column is declared nullable in
   * [[inputSchema]], and this matches the semantics of Spark's built-in
   * `avg`); the original code called `getDouble` on a possibly-null cell.
   */
  override def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
    if (!input.isNullAt(0)) {
      buffer.update(0, buffer.getDouble(0) + input.getDouble(0))
      buffer.update(1, buffer.getLong(1) + 1L)
    }
  }

  /** Combine two partial buffers from different partitions. */
  override def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
    buffer1.update(0, buffer1.getDouble(0) + buffer2.getDouble(0))
    buffer1.update(1, buffer1.getLong(1) + buffer2.getLong(1))
  }

  /**
   * Final result: total / count.
   *
   * Returns null when no non-null rows were aggregated — the original
   * returned `0.0 / 0 == Double.NaN` for an empty group.
   */
  override def evaluate(buffer: Row): Any = {
    val count = buffer.getLong(1)
    if (count == 0L) null else buffer.getDouble(0) / count
  }
}
