package bigdata.hermesfuxi.eagle.etl.jobs;

import bigdata.hermesfuxi.eagle.etl.bean.DataLogBean;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.state.*;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.*;
import java.util.stream.Collectors;

/**
 * LiveGiftPointStatistics 测试版本 - 不依赖外部系统
 * (Test version of LiveGiftPointStatistics — self-contained, no external systems required.)
 */
@Slf4j
public class LiveGiftPointStatisticsTest {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setMaxParallelism(128);
        env.setParallelism(1);

        // Bounded in-memory stream of "liveReward" events (anchorId, giftId) — no external source needed.
        DataStream<DataLogBean> beanStream = env.fromElements(
            createTestData("liveReward", "anchor001", 1),
            createTestData("liveReward", "anchor002", 2),
            createTestData("liveReward", "anchor001", 3),
            createTestData("liveReward", "anchor003", 1),
            createTestData("liveReward", "anchor002", 4),
            createTestData("liveReward", "anchor001", 5)
        );

        beanStream.print("输入数据");

        // Gift dimension rows: (giftId, giftName, pointValue, deletedFlag). deletedFlag == 1 means removal.
        DataStreamSource<Tuple4<Integer, String, Integer, Integer>> liveGiftMap = env.fromElements(
            Tuple4.of(1, "玫瑰", 10, 0),
            Tuple4.of(2, "跑车", 100, 0),
            Tuple4.of(3, "飞机", 500, 0),
            Tuple4.of(4, "火箭", 1000, 0),
            Tuple4.of(5, "城堡", 5000, 0)
        );

        // Broadcast the small dimension table; state maps giftId -> (giftName, pointValue).
        MapStateDescriptor<Integer, Tuple2<String, Integer>> mapStateDescriptor = new MapStateDescriptor<>("broadcastState", TypeInformation.of(Integer.class), TypeInformation.of(new TypeHint<Tuple2<String, Integer>>() {
        }));
        BroadcastStream<Tuple4<Integer, String, Integer, Integer>> broadcast = liveGiftMap.broadcast(mapStateDescriptor);
        
        // NOTE(review): this extra pipeline only logs and prints each gift row; it does NOT
        // populate the broadcast state (that happens in processBroadcastElement below) and it
        // gives no ordering guarantee relative to the event stream. The original comment's
        // intent ("ensure data is loaded into state first") is not actually achieved here.
        liveGiftMap.process(new ProcessFunction<Tuple4<Integer, String, Integer, Integer>, Object>() {
            @Override
            public void processElement(Tuple4<Integer, String, Integer, Integer> value, Context ctx, Collector<Object> out) throws Exception {
                log.info("加载礼物数据: {}", value);
            }
        }).print("礼物数据加载");

        // Side-output tags: per-anchor point tuples and per-gift count tuples.
        OutputTag<Tuple2<String, Integer>> anchorPointsTag = new OutputTag<Tuple2<String, Integer>>("anchorPoints") {
        };
        OutputTag<Tuple2<String, Integer>> giftCountTag = new OutputTag<Tuple2<String, Integer>>("giftCount") {
        };

        // Enrich events against the broadcast dimension table; enriched beans on the main output,
        // (anchorId, points) and (giftName, 1) tuples on the side outputs.
        SingleOutputStreamOperator<DataLogBean> result = beanStream.connect(broadcast).process(new LiveGiftPointProcess(mapStateDescriptor, anchorPointsTag, giftCountTag, 3));
        result.print("处理结果");

        // Running per-anchor point totals, re-keyed to a single key so SumOrderProcess can rank them.
        DataStream<Tuple2<String, Integer>> anchorPointsStream = result.getSideOutput(anchorPointsTag);
        SingleOutputStreamOperator<List<Map.Entry<String, Integer>>> anchorPointsResult = anchorPointsStream.keyBy(t -> t.f0).sum(1).keyBy(t -> "key").process(new SumOrderProcess());
        anchorPointsResult.print("主播积分统计");

        // Same pattern for per-gift send counts.
        DataStream<Tuple2<String, Integer>> giftCountStream = result.getSideOutput(giftCountTag);
        giftCountStream.keyBy(t->t.f0).sum(1).keyBy(t->"key").process(new SumOrderProcess()).print("礼物统计");

        env.execute("LiveGiftPointStatisticsTest");
    }

    /**
     * Builds one synthetic {@link DataLogBean} reward event for the given anchor and gift.
     *
     * @param eventId  event type identifier (e.g. "liveReward")
     * @param anchorId id of the anchor receiving the gift
     * @param giftId   numeric id of the gift that was sent
     * @return a populated test bean whose properties map mirrors the anchor/gift ids
     */
    private static DataLogBean createTestData(String eventId, String anchorId, Integer giftId) {
        DataLogBean bean = new DataLogBean();
        bean.setEventId(eventId);
        bean.setDeviceId("device" + System.currentTimeMillis());
        bean.setTimestamp(System.currentTimeMillis());
        bean.setAnchorId(anchorId);
        bean.setGiftId(giftId);

        // Duplicate the ids into the generic properties map, as downstream consumers may
        // read either the typed fields or the raw properties.
        Map<String, Object> props = new HashMap<>();
        props.put("anchor_id", anchorId);
        props.put("gift_id", giftId);
        bean.setProperties(props);

        return bean;
    }

    /**
     * Joins reward events against the broadcast gift dimension table: enriches each bean
     * with the gift's name and point value, forwards it on the main output, and emits
     * (anchorId, points) / (giftName, 1) tuples to the two side outputs.
     */
    private static class LiveGiftPointProcess extends BroadcastProcessFunction<DataLogBean, Tuple4<Integer, String, Integer, Integer>, DataLogBean> {
        private final MapStateDescriptor<Integer, Tuple2<String, Integer>> mapStateDescriptor;
        private final OutputTag<Tuple2<String, Integer>> anchorPointsTag;
        private final OutputTag<Tuple2<String, Integer>> giftCountTag;
        // Fix: the original constructor accepted topN but silently dropped it; retain it so
        // the caller-supplied ranking size is available to this operator.
        private final Integer topN;

        public LiveGiftPointProcess(MapStateDescriptor<Integer, Tuple2<String, Integer>> mapStateDescriptor, OutputTag<Tuple2<String, Integer>> anchorPointsTag, OutputTag<Tuple2<String, Integer>> giftCountTag, Integer topN) {
            this.mapStateDescriptor = mapStateDescriptor;
            this.anchorPointsTag = anchorPointsTag;
            this.giftCountTag = giftCountTag;
            this.topN = topN;
        }

        /**
         * Enriches a single event from read-only broadcast state. Events whose giftId is
         * unknown (not yet broadcast, or deleted) are dropped silently.
         */
        @Override
        public void processElement(DataLogBean bean, ReadOnlyContext ctx, Collector<DataLogBean> out) throws Exception {
            ReadOnlyBroadcastState<Integer, Tuple2<String, Integer>> broadcastMap = ctx.getBroadcastState(mapStateDescriptor);
            Integer giftId = bean.getGiftId();
            String anchorId = bean.getAnchorId();

            if (giftId != null && anchorId != null && broadcastMap != null && broadcastMap.contains(giftId)) {
                Tuple2<String, Integer> giftInfo = broadcastMap.get(giftId);

                if (giftInfo != null) {
                    String giftName = giftInfo.f0;
                    Integer points = giftInfo.f1;
                    bean.setGiftName(giftName);
                    bean.setPoints(points);

                    out.collect(bean);
                    ctx.output(anchorPointsTag, Tuple2.of(anchorId, points));
                    ctx.output(giftCountTag, Tuple2.of(giftName, 1));
                }
            }
        }

        /**
         * Upserts (or removes, when the deleted flag is 1) one dimension row
         * (giftId, giftName, points, deletedFlag) into broadcast state.
         */
        @Override
        public void processBroadcastElement(Tuple4<Integer, String, Integer, Integer> value, Context ctx, Collector<DataLogBean> out) throws Exception {
            Integer id = value.f0;
            Integer deleted = value.f3;
            BroadcastState<Integer, Tuple2<String, Integer>> broadcastState = ctx.getBroadcastState(mapStateDescriptor);
            // Fix: null-safe comparison — the original "deleted == 1" unboxing NPEs when f3 is null.
            if (Integer.valueOf(1).equals(deleted)) {
                broadcastState.remove(id);
            } else {
                broadcastState.put(id, Tuple2.of(value.f1, value.f2));
            }
        }
    }

    /**
     * Collects the latest running total per name under a single key and, every 10-second
     * processing-time window boundary, emits all entries sorted by total, descending.
     * Input tuples are (name, runningTotal) produced by an upstream {@code .sum(1)}.
     */
    public static class SumOrderProcess extends KeyedProcessFunction<String, Tuple2<String, Integer>, List<Map.Entry<String, Integer>>> {
        // Keyed state holding the latest running total per name (anchor or gift).
        private transient ValueState<Map<String, Integer>> mapValueState;

        @Override
        public void open(Configuration parameters) throws Exception {
            // State name kept as "listValueState" (despite holding a Map) for savepoint compatibility.
            mapValueState = getRuntimeContext().getState(new ValueStateDescriptor<>("listValueState", TypeInformation.of(new TypeHint<Map<String, Integer>>() {
            })));
        }

        @Override
        public void processElement(Tuple2<String, Integer> value, Context ctx, Collector<List<Map.Entry<String, Integer>>> out) throws Exception {
            Map<String, Integer> totals = mapValueState.value();
            if (totals == null) {
                totals = new HashMap<>();
            }
            // value.f1 is already a running sum from the upstream .sum(1), so overwrite — don't add.
            totals.put(value.f0, value.f1);
            mapValueState.update(totals);

            // Fix: use Flink's TimerService clock rather than System.currentTimeMillis() so the
            // trigger time is consistent with the timers themselves. Align to the next 10s
            // boundary; duplicate registrations at the same timestamp are deduplicated by Flink.
            long now = ctx.timerService().currentProcessingTime();
            long triggerTime = now - now % 10000 + 10000;
            ctx.timerService().registerProcessingTimeTimer(triggerTime);
        }

        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<List<Map.Entry<String, Integer>>> out) throws Exception {
            Map<String, Integer> totals = mapValueState.value();
            // Fix: guard against a timer firing with no accumulated state (the original NPEd).
            if (totals == null || totals.isEmpty()) {
                return;
            }
            // Fix: overflow-safe descending sort — the original "b.getValue() - a.getValue()"
            // subtraction comparator can overflow for large totals.
            List<Map.Entry<String, Integer>> ranking = totals.entrySet().stream()
                    .sorted(Map.Entry.<String, Integer>comparingByValue(Comparator.reverseOrder()))
                    .collect(Collectors.toList());
            out.collect(ranking);
        }
    }
}
