package com.edu.realtime.app.dws;

import java.time.Duration;
import java.util.concurrent.TimeUnit;

import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import com.alibaba.fastjson.JSONObject;
import com.edu.realtime.app.func.DimAsyncFunction;
import com.edu.realtime.bean.TestPaperScorePhase;
import com.edu.realtime.util.DateFormatUtil;
import com.edu.realtime.util.MyClickhouseUtil;
import com.edu.realtime.util.MyKafkaUtil;

/**
 * Created on 2022/10/21.
 *
 * @author Topus
 * @desc 考试域：各试卷分段分布人数
 */
public class DwsTestPaperScorePhaseWindow {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        /*
         * Read exam records from the dwd_test_exam topic. Each record carries
         * user_id, paper_id, score, and an epoch-second ts. The score is mapped
         * to a grade band used as part of the aggregation key:
         *   A: 90-100, B: 75-89, C: 60-74, D: 0-59.
         */
        String topic = "dwd_test_exam";
        String groupId = "dws_test_paper_score_phase_window_group";
        DataStreamSource<String> source = env.addSource(MyKafkaUtil.getKafkaConsumer(topic, groupId));
        SingleOutputStreamOperator<TestPaperScorePhase> mapDS = source.map(str -> {
            JSONObject jsonObj = JSONObject.parseObject(str);
            Double score = jsonObj.getDouble("score");
            String userId = jsonObj.getString("user_id");
            String paperId = jsonObj.getString("paper_id");
            Long ts = jsonObj.getLong("ts") * 1000L;  // source ts is in seconds; Flink wants millis

            // Classify the score into a grade band. A null score would previously
            // NPE on auto-unboxing and fail the whole job; treat it as the lowest
            // band (best-effort) instead.
            String level;
            if (score == null) {
                level = "D";
            } else if (score >= 90) {
                level = "A";
            } else if (score >= 75) {
                level = "B";
            } else if (score >= 60) {
                level = "C";
            } else {
                level = "D";
            }
            return new TestPaperScorePhase("", "", paperId, "", level, userId, 0, ts);
        });

        // Event-time watermarks with 3s bounded out-of-orderness, based on the record ts.
        SingleOutputStreamOperator<TestPaperScorePhase> withWatermarkDS = mapDS.assignTimestampsAndWatermarks(
            WatermarkStrategy.<TestPaperScorePhase>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(new SerializableTimestampAssigner<TestPaperScorePhase>() {
                    @Override
                    public long extractTimestamp(TestPaperScorePhase element, long recordTimestamp) {
                        return element.getTs();
                    }
                }));

        // Shared key selector: all per-paper/per-band operations key on (paper_id, level).
        KeySelector<TestPaperScorePhase, Tuple2<String, String>> keySelector =
            new KeySelector<TestPaperScorePhase, Tuple2<String, String>>() {
                @Override
                public Tuple2<String, String> getKey(TestPaperScorePhase value) throws Exception {
                    return Tuple2.of(value.getPaperId(), value.getLevel());
                }
            };
        KeyedStream<TestPaperScorePhase, Tuple2<String, String>> keyedDS =
            withWatermarkDS.keyBy(keySelector);

        // Count unique users: within each (paper_id, level) key, only a user's first
        // record (per state-TTL period) contributes userCount = 1; repeats pass
        // through with userCount = 0.
        SingleOutputStreamOperator<TestPaperScorePhase> processDS = keyedDS
            .process(new KeyedProcessFunction<Tuple2<String, String>, TestPaperScorePhase, TestPaperScorePhase>() {
                // Set of user ids already seen for the current key; entries expire
                // after one day so the distinct-count effectively resets daily.
                private MapState<String, Integer> mapState;

                @Override
                public void open(Configuration parameters) throws Exception {
                    MapStateDescriptor<String, Integer> mapStateDescriptor =
                        new MapStateDescriptor<>("test-paper-score-phase-state", String.class, Integer.class);
                    mapStateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(Time.days(1)).build());
                    mapState = getRuntimeContext().getMapState(mapStateDescriptor);
                }

                @Override
                public void processElement(TestPaperScorePhase value,
                    KeyedProcessFunction<Tuple2<String, String>, TestPaperScorePhase, TestPaperScorePhase>.Context ctx,
                    Collector<TestPaperScorePhase> out) throws Exception {

                    String userId = value.getUserId();
                    if (!mapState.contains(userId)) {
                        mapState.put(userId, 1);
                        value.setUserCount(1);
                    }
                    out.collect(value);
                }
            });

        /*
         * BUG FIX: the original used windowAll + AllWindowFunction here, which is a
         * non-keyed, parallelism-1 window: the ReduceFunction merged userCount across
         * EVERY (paper_id, level) key into a single record per window, destroying the
         * per-paper per-band breakdown this job exists to produce. Re-key by the same
         * selector and use a keyed tumbling window so counts stay scoped per key.
         */
        SingleOutputStreamOperator<TestPaperScorePhase> reduceDS = processDS
            .keyBy(keySelector)
            .window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
            .reduce(new ReduceFunction<TestPaperScorePhase>() {
                @Override
                public TestPaperScorePhase reduce(TestPaperScorePhase value1, TestPaperScorePhase value2)
                    throws Exception {
                    // Incremental per-key aggregation: sum the distinct-user markers.
                    value1.setUserCount(value1.getUserCount() + value2.getUserCount());
                    return value1;
                }
            }, new WindowFunction<TestPaperScorePhase, TestPaperScorePhase, Tuple2<String, String>, TimeWindow>() {
                @Override
                public void apply(Tuple2<String, String> key, TimeWindow window,
                    Iterable<TestPaperScorePhase> input, Collector<TestPaperScorePhase> out) throws Exception {
                    // Stamp window boundaries and an output timestamp on the
                    // single pre-reduced record per key.
                    String stt = DateFormatUtil.toYmdHms(window.getStart());
                    String edt = DateFormatUtil.toYmdHms(window.getEnd());

                    for (TestPaperScorePhase scorePhase : input) {
                        scorePhase.setStt(stt);
                        scorePhase.setEdt(edt);
                        scorePhase.setTs(System.currentTimeMillis());
                        out.collect(scorePhase);
                    }
                }
            });

        // Asynchronously enrich each aggregate with the paper title from the
        // dim_test_paper dimension table (120s timeout per lookup).
        SingleOutputStreamOperator<TestPaperScorePhase> joinDS =
            AsyncDataStream.unorderedWait(reduceDS, new DimAsyncFunction<TestPaperScorePhase>("dim_test_paper") {
                @Override
                public void join(TestPaperScorePhase input, JSONObject dimInfo) {
                    input.setPaperName(dimInfo.getString("PAPER_TITLE"));
                }

                @Override
                public String getKey(TestPaperScorePhase obj) {
                    return obj.getPaperId();
                }
            }, 120, TimeUnit.SECONDS);

        joinDS.print("joinDS>>");
        // Write the windowed aggregates to ClickHouse.
        joinDS.addSink(MyClickhouseUtil.<
            TestPaperScorePhase>getSinkFunction("insert into dws_test_paper_score_phase_window values(?,?,?,?,?,?,?)"));

        env.execute();
    }
}
