package com.flinkBigWork.util;

import com.flinkBigWork.entity.Entity;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;

import java.util.HashMap;
import java.util.Map;


/**
 * Flink {@link AggregateFunction} that accumulates, per window id, the total
 * service (cost) time and the event count.
 *
 * <p>Accumulator layout:
 * {@code Tuple2<Map<windowId, Tuple2<totalCostTime, count>>, latestTimestamp>}.
 * The output type equals the accumulator type — {@link #getResult} is the identity.
 *
 * <p>Note: Flink {@code Tuple2} fields are mutable; this class updates them
 * in place during {@link #add} and therefore copies them defensively in
 * {@link #merge} to avoid aliasing between accumulators.
 */
public class OneSecondAggregate2 implements AggregateFunction<Entity, Tuple2<Map<String, Tuple2<Integer, Integer>>, String>, Tuple2<Map<String, Tuple2<Integer, Integer>>, String>> {

    @Override
    public Tuple2<Map<String, Tuple2<Integer, Integer>>, String> createAccumulator() {
        return Tuple2.of(new HashMap<>(), "");
    }

    /**
     * Folds one record into the accumulator: adds the record's cost time and a
     * count of 1 under its window id, and carries the record's timestamp
     * forward as the accumulator's timestamp.
     */
    @Override
    public Tuple2<Map<String, Tuple2<Integer, Integer>>, String> add(Entity entity, Tuple2<Map<String, Tuple2<Integer, Integer>>, String> accumulator) {
        Map<String, Tuple2<Integer, Integer>> perWindow = accumulator.f0;

        String windowId = entity.getWindowId();
        Integer costTime = entity.getCostTime();

        Tuple2<Integer, Integer> stats = perWindow.get(windowId);
        if (stats == null) {
            // First record for this window id: (totalCostTime, count) = (costTime, 1).
            perWindow.put(windowId, new Tuple2<>(costTime, 1));
        } else {
            // The tuple is already referenced by the map; mutate it in place —
            // re-putting it would be redundant.
            stats.f0 += costTime;
            stats.f1 += 1;
        }

        // Reuse the accumulator object instead of allocating a new Tuple2 per
        // record; only the timestamp field changes.
        accumulator.f1 = entity.getTimestamp();
        return accumulator;
    }

    @Override
    public Tuple2<Map<String, Tuple2<Integer, Integer>>, String> getResult(Tuple2<Map<String, Tuple2<Integer, Integer>>, String> accumulator) {
        return accumulator;
    }

    /**
     * Merges two partial accumulators by summing (totalCostTime, count) per
     * window id. The first accumulator's timestamp is kept as the merged
     * timestamp (an arbitrary but deterministic choice).
     */
    @Override
    public Tuple2<Map<String, Tuple2<Integer, Integer>>, String> merge(
            Tuple2<Map<String, Tuple2<Integer, Integer>>, String> acc1,
            Tuple2<Map<String, Tuple2<Integer, Integer>>, String> acc2) {
        Map<String, Tuple2<Integer, Integer>> mergedMap = new HashMap<>();

        // Copy acc1's entries defensively: Tuple2 is mutable, and sharing the
        // same tuple instance between the merged map and a source accumulator
        // would let a later in-place add() corrupt both.
        for (Map.Entry<String, Tuple2<Integer, Integer>> entry : acc1.f0.entrySet()) {
            Tuple2<Integer, Integer> stats = entry.getValue();
            mergedMap.put(entry.getKey(), new Tuple2<>(stats.f0, stats.f1));
        }

        // Fold acc2 in: sum into existing entries, copy new ones.
        for (Map.Entry<String, Tuple2<Integer, Integer>> entry : acc2.f0.entrySet()) {
            Tuple2<Integer, Integer> stats = entry.getValue();
            Tuple2<Integer, Integer> existing = mergedMap.get(entry.getKey());
            if (existing == null) {
                mergedMap.put(entry.getKey(), new Tuple2<>(stats.f0, stats.f1));
            } else {
                existing.f0 += stats.f0;
                existing.f1 += stats.f1;
            }
        }

        // Keep the first accumulator's timestamp as the merged timestamp.
        return Tuple2.of(mergedMap, acc1.f1);
    }


}

