package net.bwie.realtime.jtp.dws.douyin.log.job;

import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.dws.douyin.log.functions.DouYinTrendWindowFunction6;
import net.bwie.realtime.jtp.utils.DorisUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.functions.co.RichCoFlatMapFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import net.bwie.realtime.jtp.dws.douyin.log.bean.EventLog6;
import net.bwie.realtime.jtp.dws.douyin.log.bean.InteractionTrendMetric6;
import net.bwie.realtime.jtp.dws.douyin.log.bean.UserFanRelation6;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.HashMap;
import java.util.Map;

public class DouYinTrendMonitor6 {

    /**
     * Entry point: builds and launches the live interaction-trend job.
     * Topology: Kafka (event log + fan relation) -> connect/join -> sliding
     * window aggregation -> JSON -> stdout print + Doris sink.
     */
    public static void main(String[] args) throws Exception {
        // 1. Initialize the Flink execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // NOTE(review): the in-memory join in handle() relies on parallelism 1 —
        // with parallelism > 1 the fan-relation cache would be split across
        // subtasks and lookups would silently miss. Confirm before scaling.
        env.setParallelism(1);
        env.enableCheckpointing(3000L);

        // 2. Kafka sources: raw event-log and user/fan-relation topics.
        DataStream<String> eventLogKafkaStream = KafkaUtil.consumerKafka(env, "ods_live_event_log");
        DataStream<String> fanRelationKafkaStream = KafkaUtil.consumerKafka(env, "ods_user_fan_relation");

        // 3. Parse, join, window, aggregate and format.
        DataStream<String> resultStream = handle(eventLogKafkaStream, fanRelationKafkaStream);

        // 4. Debug print of the final JSON records.
        resultStream.print("互动趋势指标→");

        // 5. Sink to the Doris report table.
        DorisUtil.saveToDoris(resultStream, "douyin_realtime_report", "dws_live_interaction_trend");

        // 6. Trigger job execution.
        env.execute("DouYinTrendMonitor6");
    }

    /**
     * Joins event logs with the most recent fan relation per anchor, aggregates
     * interaction metrics over a 5-second sliding window (1-second slide), and
     * serializes each metric to a JSON string.
     *
     * @param eventLogStream    raw JSON strings from the event-log topic
     * @param fanRelationStream raw JSON strings from the fan-relation topic
     * @return one JSON string per (anchor, window) metric
     */
    private static DataStream<String> handle(
            DataStream<String> eventLogStream,
            DataStream<String> fanRelationStream) {

        // Step 1: parse event-log JSON. JSON.parseObject throws on malformed
        // input, which would fail the whole job; catch and map to null so the
        // existing filter drops bad records instead.
        SingleOutputStreamOperator<EventLog6> parsedEventLogStream = eventLogStream
                .map(jsonStr -> {
                    try {
                        return JSON.parseObject(jsonStr, EventLog6.class);
                    } catch (Exception ignored) {
                        return null; // malformed record — dropped by the filter below
                    }
                })
                .filter(eventLog -> eventLog != null);

        // Step 2: parse fan-relation JSON with the same drop-on-error policy.
        SingleOutputStreamOperator<UserFanRelation6> parsedFanRelationStream = fanRelationStream
                .map(jsonStr -> {
                    try {
                        return JSON.parseObject(jsonStr, UserFanRelation6.class);
                    } catch (Exception ignored) {
                        return null; // malformed record — dropped by the filter below
                    }
                })
                .filter(fanRelation -> fanRelation != null);

        // Step 3: connect the two streams and join via an in-process cache.
        ConnectedStreams<EventLog6, UserFanRelation6> connectedStreams = parsedEventLogStream
                .connect(parsedFanRelationStream);

        // NOTE(review): this HashMap is plain operator memory, NOT Flink state —
        // it is not covered by the 3s checkpoints (lost on failover) and grows
        // unboundedly, one entry per anchor. A keyed connect with MapState would
        // make the join fault-tolerant; left as-is to preserve topology.
        SingleOutputStreamOperator<Tuple2<EventLog6, UserFanRelation6>> joinedStream = connectedStreams
                .flatMap(new RichCoFlatMapFunction<EventLog6, UserFanRelation6, Tuple2<EventLog6, UserFanRelation6>>() {
                    // Cache of the latest fan relation keyed by anchor ID.
                    private Map<Long, UserFanRelation6> fanRelationCache = new HashMap<>();

                    @Override
                    public void flatMap1(EventLog6 eventLog, Collector<Tuple2<EventLog6, UserFanRelation6>> out) throws Exception {
                        // Event-log side: enrich with the cached fan relation, if any.
                        UserFanRelation6 fanRelation = fanRelationCache.get(eventLog.getAnchorId());
                        if (fanRelation != null) {
                            out.collect(new Tuple2<>(eventLog, fanRelation));
                        } else {
                            // NOTE(review): Flink's TupleSerializer does not accept
                            // null fields, and the keyBy below forces serialization
                            // even at parallelism 1 — this tuple is likely to fail at
                            // runtime. Consider emitting a sentinel UserFanRelation6
                            // instead; verify against DouYinTrendWindowFunction6's
                            // null handling before changing.
                            out.collect(new Tuple2<>(eventLog, null));
                        }
                    }

                    @Override
                    public void flatMap2(UserFanRelation6 fanRelation, Collector<Tuple2<EventLog6, UserFanRelation6>> out) throws Exception {
                        // Fan-relation side: upsert into the cache; emits nothing.
                        fanRelationCache.put(fanRelation.getAnchorId(), fanRelation);
                    }
                });

        // Step 4: key by anchor and apply a 5s sliding processing-time window
        // advancing every 1s (each event contributes to 5 windows).
        WindowedStream<Tuple2<EventLog6, UserFanRelation6>, Long, TimeWindow> windowStream = joinedStream
                .keyBy(tuple -> tuple.f0.getAnchorId())
                .window(SlidingProcessingTimeWindows.of(Time.seconds(5), Time.seconds(1)));

        SingleOutputStreamOperator<InteractionTrendMetric6> metricStream = windowStream
                .apply(new DouYinTrendWindowFunction6());

        // Step 5: format each metric as a flat JSON object. Nulls are mapped to
        // empty strings / "0" so the sink never sees the literal "null".
        // NOTE(review): the count fields are emitted as quoted strings; confirm
        // the Doris table schema accepts string-typed numerics.
        SingleOutputStreamOperator<String> resultStream = metricStream
                .map(metric -> {
                    return String.format(
                            "{\"window_start_time\":\"%s\",\"window_end_time\":\"%s\",\"cur_date\":\"%s\",\"live_room_id\":\"%s\",\"anchor_id\":\"%s\",\"new_comment_count\":\"%s\",\"new_fan_count\":\"%s\"}",
                            metric.getWindowStartTime() != null ? metric.getWindowStartTime() : "",
                            metric.getWindowEndTime() != null ? metric.getWindowEndTime() : "",
                            metric.getCurDate() != null ? metric.getCurDate() : "",
                            metric.getLiveRoomId() != null ? metric.getLiveRoomId() : "",
                            metric.getAnchorId() != null ? metric.getAnchorId() : "",
                            metric.getNewCommentCount() >= 0 ? metric.getNewCommentCount() : "0",
                            metric.getNewFanCount() >= 0 ? metric.getNewFanCount() : "0"
                    );
                });

        return resultStream;
    }
}
