package com.flinkBigWork.util;

import com.flinkBigWork.entity.Entity;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.java.tuple.Tuple2;

import java.util.HashMap;
import java.util.Map;


/**
 * Flink {@link AggregateFunction} that collects, per window id, the most
 * recent queue length reported by an {@link Entity}, alongside the timestamp
 * of the latest element folded into the accumulator.
 *
 * <p>Accumulator / result shape: {@code Tuple2<Map<windowId, queueLength>, timestamp>}.
 *
 * <p>NOTE(review): {@code add} lets the last value win for a duplicate
 * windowId, while {@code merge} keeps the larger of the two values — confirm
 * this asymmetry is intended.
 */
public class OneSecondAggregate4 implements AggregateFunction<Entity, Tuple2<Map<String, Integer>, String>, Tuple2<Map<String, Integer>, String>> {

    @Override
    public Tuple2<Map<String, Integer>, String> createAccumulator() {
        // Fresh mutable map; timestamp stays empty until the first add().
        return Tuple2.of(new HashMap<>(), "");
    }

    @Override
    public Tuple2<Map<String, Integer>, String> add(Entity entity, Tuple2<Map<String, Integer>, String> accumulator) {
        // Record (or overwrite) this window's queue length in the accumulator's map.
        accumulator.f0.put(entity.getWindowId(), entity.getQueueLength());
        // Carry the timestamp of the element just added as the accumulator's timestamp.
        return Tuple2.of(accumulator.f0, entity.getTimestamp());
    }

    @Override
    public Tuple2<Map<String, Integer>, String> getResult(Tuple2<Map<String, Integer>, String> accumulator) {
        // The accumulator already has the output shape; return it as-is.
        return accumulator;
    }

    @Override
    public Tuple2<Map<String, Integer>, String> merge(
            Tuple2<Map<String, Integer>, String> acc1,
            Tuple2<Map<String, Integer>, String> acc2) {
        // Union of both maps; when a windowId exists in both, keep the larger
        // queue length. Map.merge replaces the original two-pass manual merge.
        Map<String, Integer> mergedMap = new HashMap<>(acc1.f0);
        acc2.f0.forEach((windowId, queueLength) ->
                mergedMap.merge(windowId, queueLength, Math::max));

        // Keep the first accumulator's timestamp (same policy as before).
        return Tuple2.of(mergedMap, acc1.f1);
    }
}

