package com.atguigu.education.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.education.bean.QuestionUserCountBean;
import com.atguigu.education.util.ClickHouseUtil;
import com.atguigu.education.util.DateFormatUtil;
import com.atguigu.education.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * DWS job: counts, per tumbling 10s event-time window, the number of distinct
 * (question, user) test takers per day and how many of them answered correctly,
 * then writes the aggregates to ClickHouse table dws_question_user_count_window.
 *
 * Pipeline: Kafka (topic_db) -> filter test_exam_question inserts -> keyBy
 * (question_id, user_id) -> per-day de-dup via TTL'd ValueState -> watermark ->
 * windowAll reduce -> ClickHouse sink.
 */
public class DwsQuestionUserCountWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1: environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // NOTE(review): tableEnv is never used below — remove if no Table API usage is planned.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // TODO 2: state backend / checkpointing (disabled for local development)
        /*
           env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
           env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
           env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
           env.setStateBackend(new HashMapStateBackend());
           env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
           System.setProperty("HADOOP_USER_NAME", "atguigu");
        */
        // TODO 3: read the exam data from Kafka
        String topicName = "topic_db";
        String groupID = "dws_question_user_count_window";
        DataStreamSource<String> dbStream = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupID));

        // TODO 4: filter to test_exam_question inserts; malformed JSON is logged and dropped
        SingleOutputStreamOperator<JSONObject> jsonObjStream = dbStream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    String table = jsonObject.getString("table");
                    String type = jsonObject.getString("type");
                    JSONObject data = jsonObject.getJSONObject("data");
                    if ("test_exam_question".equals(table) && "insert".equals(type)) {
                        out.collect(data);
                    }
                } catch (Exception e) {
                    // Best-effort: skip records that are not valid JSON envelopes.
                    e.printStackTrace();
                }
            }
        });

        // Key by (question_id, user_id) so per-user-per-question state is isolated.
        KeyedStream<JSONObject, Tuple2<String, String>> keyedStream =
                jsonObjStream.keyBy(new KeySelector<JSONObject, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> getKey(JSONObject value) throws Exception {
                        return new Tuple2<>(value.getString("question_id"), value.getString("user_id"));
                    }
                });

        // De-duplicate: each (question, user) contributes at most one bean per day.
        SingleOutputStreamOperator<QuestionUserCountBean> beanStream =
                keyedStream.flatMap(new RichFlatMapFunction<JSONObject, QuestionUserCountBean>() {

                    // Last date (yyyy-MM-dd) this key emitted a bean; TTL'd to 1 day
                    // so state does not grow without bound.
                    ValueState<String> lastTestDtState = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        ValueStateDescriptor<String> lastTestDtDescriptor = new ValueStateDescriptor<>("last_test_dt",
                                String.class);
                        lastTestDtDescriptor.enableTimeToLive(StateTtlConfig
                                .newBuilder(Time.days(1L))
                                .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                                .build());
                        lastTestDtState = getRuntimeContext().getState(lastTestDtDescriptor);
                    }

                    @Override
                    public void flatMap(JSONObject value, Collector<QuestionUserCountBean> out) throws Exception {
                        Long ts = DateFormatUtil.toTs(value.getString("create_time"), true);
                        Long isCorrect = value.getLong("is_correct");
                        String questionId = value.getString("question_id");
                        String testDt = DateFormatUtil.toDate(ts);
                        String lastTestDt = lastTestDtState.value();
                        // BUGFIX: the previous version had two independent ifs reading the
                        // stale local lastTestDt, so a correct answer on a new day emitted
                        // TWO beans and doubled userCount. Emit exactly one bean per key per
                        // day, carrying the correctness flag. Null-safe unboxing of isCorrect.
                        if (lastTestDt == null || !lastTestDt.equals(testDt)) {
                            long rightUserCount = (isCorrect != null && isCorrect == 1L) ? 1L : 0L;
                            out.collect(new QuestionUserCountBean("", "", questionId, 1L, rightUserCount, ts));
                            lastTestDtState.update(testDt);
                        }
                    }
                });

        // TODO 5: event-time watermarks, 2s bounded out-of-orderness
        SingleOutputStreamOperator<QuestionUserCountBean> beanWithWatermarkStream = beanStream
                .assignTimestampsAndWatermarks(WatermarkStrategy.<QuestionUserCountBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<QuestionUserCountBean>() {
                            @Override
                            public long extractTimestamp(QuestionUserCountBean element, long recordTimestamp) {
                                return element.getTs();
                            }
                        }));

        // TODO 6: 10-second tumbling window, sum both counters
        SingleOutputStreamOperator<QuestionUserCountBean> reducedStream =
                beanWithWatermarkStream.windowAll(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10L)))
                        .reduce(new ReduceFunction<QuestionUserCountBean>() {
                            @Override
                            public QuestionUserCountBean reduce(QuestionUserCountBean value1, QuestionUserCountBean value2) throws Exception {
                                value1.setUserCount(value1.getUserCount() + value2.getUserCount());
                                // BUGFIX: was setUserCount(...) here, which overwrote the user
                                // count with the right-answer sum and lost rightUserCount.
                                value1.setRightUserCount(value1.getRightUserCount() + value2.getRightUserCount());
                                return value1;
                            }
                        }, new AllWindowFunction<QuestionUserCountBean, QuestionUserCountBean, TimeWindow>() {
                            @Override
                            public void apply(TimeWindow window, Iterable<QuestionUserCountBean> input,
                                              Collector<QuestionUserCountBean> out) throws Exception {
                                // The reduce leaves exactly one pre-aggregated bean per window;
                                // stamp it with the window bounds and the emit time.
                                QuestionUserCountBean questionUserCountBean = input.iterator().next();
                                questionUserCountBean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                                questionUserCountBean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                                questionUserCountBean.setTs(System.currentTimeMillis());
                                out.collect(questionUserCountBean);
                            }
                        });
        // TODO 7: sink to ClickHouse
        reducedStream.addSink(ClickHouseUtil.getClickHouseSinkFunc("insert into dws_question_user_count_window " +
                "values" +
                "(?,?,?,?,?,?)"));
        // TODO 8: execute
        env.execute(groupID);
    }
}
