package com.flinkBigWork.util;

import com.flinkBigWork.entity.Entity;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.java.tuple.Tuple2;

import java.util.HashMap;
import java.util.Map;


/**
 * Flink {@link AggregateFunction} that reduces a stream of {@link Entity} records
 * to a {@code Tuple2<Integer, String>} of (running count, timestamp of the most
 * recently added entity). The accumulator type and the result type are identical,
 * so {@link #getResult} simply passes the accumulator through.
 */
public class OneSecondAggregate1 implements AggregateFunction<Entity, Tuple2<Integer, String>, Tuple2<Integer, String>> {

    /** Fresh accumulator: zero count, empty timestamp. */
    @Override
    public Tuple2<Integer, String> createAccumulator() {
        return Tuple2.of(0, "");
    }

    /**
     * Folds one entity into the accumulator: the count is incremented and the
     * stored timestamp is replaced by the incoming entity's timestamp, so the
     * accumulator always carries the timestamp of the latest element seen.
     */
    @Override
    public Tuple2<Integer, String> add(Entity entity, Tuple2<Integer, String> accumulator) {
        return Tuple2.of(accumulator.f0 + 1, entity.getTimestamp());
    }

    /** Emits the accumulator unchanged — result and accumulator share a type. */
    @Override
    public Tuple2<Integer, String> getResult(Tuple2<Integer, String> accumulator) {
        return accumulator;
    }

    /**
     * Combines two partial aggregates (e.g. from session-window merging):
     * counts are summed, and the first accumulator's timestamp is kept by
     * convention — adjust here if different timestamp semantics are required.
     */
    @Override
    public Tuple2<Integer, String> merge(Tuple2<Integer, String> acc1, Tuple2<Integer, String> acc2) {
        return Tuple2.of(acc1.f0 + acc2.f0, acc1.f1);
    }
}

