package org.example.com.atguigu.day05;

import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.expressions.Aggregator;

import static org.apache.spark.sql.functions.udaf;

/**
 * Spark SQL typed {@link Aggregator} that computes the average of integer scores.
 *
 * <p>Input: an {@link Integer} score per row; buffer: {@link ScoreBuff} holding a
 * running sum and count; output: the average as a {@link Double}, or {@code null}
 * for an empty group (SQL {@code AVG} semantics — the unguarded division would
 * otherwise yield {@code NaN} from {@code 0.0 / 0}).
 */
public class AvgAgg extends Aggregator<Integer, ScoreBuff, Double> {

    /** Returns the zero-valued buffer (sum = 0, count = 0) that starts an aggregation. */
    @Override
    public ScoreBuff zero() {
        return new ScoreBuff(0, 0);
    }

    /**
     * Folds one input score into the buffer (map-side / combiner step).
     *
     * @param tmp   running buffer; mutated in place and returned
     * @param score the score to accumulate
     * @return the updated buffer
     */
    @Override
    public ScoreBuff reduce(ScoreBuff tmp, Integer score) {
        tmp.setSum(tmp.getSum() + score);
        tmp.setCount(tmp.getCount() + 1);
        return tmp;
    }

    /**
     * Merges two partial buffers (reduce-side step).
     *
     * @param tmp  buffer merged into; mutated in place and returned
     * @param buff partial result from an earlier {@link #reduce} / {@link #merge}
     * @return the combined buffer
     */
    @Override
    public ScoreBuff merge(ScoreBuff tmp, ScoreBuff buff) {
        tmp.setSum(tmp.getSum() + buff.getSum());
        tmp.setCount(tmp.getCount() + buff.getCount());
        return tmp;
    }

    /**
     * Produces the final average from the fully-reduced buffer.
     *
     * @param reduction the merged buffer for the group
     * @return {@code sum / count}, or {@code null} when no rows were aggregated
     */
    @Override
    public Double finish(ScoreBuff reduction) {
        long count = reduction.getCount();
        if (count == 0) {
            // Empty group: return null like SQL AVG instead of 0.0 / 0 == NaN.
            return null;
        }
        return (double) reduction.getSum() / count;
    }

    /** Encoder for the intermediate {@link ScoreBuff} buffer (JavaBean encoding). */
    @Override
    public Encoder<ScoreBuff> bufferEncoder() {
        return Encoders.bean(ScoreBuff.class);
    }

    /** Encoder for the final {@code Double} result. */
    @Override
    public Encoder<Double> outputEncoder() {
        return Encoders.DOUBLE();
    }
}
