package top.doe.spark_sql;

import org.apache.spark.sql.Row;
import org.apache.spark.sql.expressions.MutableAggregationBuffer;
import org.apache.spark.sql.expressions.UserDefinedAggregateFunction;
import org.apache.spark.sql.types.*;

/**
 * Untyped user-defined aggregate function computing the average of a
 * double column, ignoring null inputs (same semantics as Spark's built-in
 * {@code avg}): sum of non-null values divided by their count, or
 * {@code null} when the group contains no non-null values.
 */
public class MyAvg extends UserDefinedAggregateFunction {

    /** Input schema: one nullable double column named "amt". */
    @Override
    public StructType inputSchema() {
        return new StructType(new StructField[]{
                new StructField("amt", DataTypes.DoubleType, true, Metadata.empty())
        });
    }

    /**
     * Aggregation buffer schema: running count and running sum.
     * The count is a long so very large groups cannot overflow an int.
     */
    @Override
    public StructType bufferSchema() {
        return new StructType(new StructField[]{
                new StructField("count", DataTypes.LongType, true, Metadata.empty()),
                new StructField("sum", DataTypes.DoubleType, true, Metadata.empty())
        });
    }

    /** Result type of the aggregation: a double average. */
    @Override
    public DataType dataType() {
        return DataTypes.DoubleType;
    }

    /** Same inputs always produce the same result. */
    @Override
    public boolean deterministic() {
        return true;
    }

    /** Resets the buffer to an empty state: zero rows seen, zero sum. */
    @Override
    public void initialize(MutableAggregationBuffer buffer) {
        buffer.update(0, 0L);
        buffer.update(1, 0.0);
    }

    /** Folds one input row into the buffer; null inputs are skipped. */
    @Override
    public void update(MutableAggregationBuffer buffer, Row input) {
        // The input column is declared nullable; getDouble on a null cell
        // would throw, so skip nulls (matching built-in avg semantics).
        if (input.isNullAt(0)) {
            return;
        }
        double amt = input.getDouble(0);
        buffer.update(0, buffer.getLong(0) + 1L);
        buffer.update(1, buffer.getDouble(1) + amt);
    }

    /** Combines two partial buffers by adding their counts and sums. */
    @Override
    public void merge(MutableAggregationBuffer buffer1, Row buffer2) {
        buffer1.update(0, buffer1.getLong(0) + buffer2.getLong(0));
        buffer1.update(1, buffer1.getDouble(1) + buffer2.getDouble(1));
    }

    /**
     * Produces the final average, or {@code null} when no non-null
     * values were aggregated (avoids the NaN that 0.0 / 0 would yield).
     */
    @Override
    public Object evaluate(Row buffer) {
        long count = buffer.getLong(0);
        if (count == 0L) {
            return null;
        }
        return buffer.getDouble(1) / count;
    }
}
