package com.atguigu.education.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.education.bean.CourseExamUserCountBean;
import com.atguigu.education.util.ClickHouseUtil;
import com.atguigu.education.util.DateFormatUtil;
import com.atguigu.education.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * DWS-layer Flink job: counts daily-unique exam takers per course.
 *
 * <p>Pipeline: Kafka topic {@code dwd_test_exam} → parse JSON → key by
 * (course_id, user_id) → stateful per-day dedup (ValueState with 1-day TTL)
 * → event-time watermarks → 10-second tumbling windowAll reduce → ClickHouse
 * table {@code dws_course_exam_user_count_window}.
 *
 * <p>Parallelism is fixed at 1, so the non-keyed {@code windowAll} does not
 * become a bottleneck relative to the rest of the job.
 */
public class DwsCourseExamUserCountWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1 Prepare the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // TODO 2 Configure state backend / checkpointing
        // Intentionally disabled for local development; enable for production runs.
        /*
           env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
           env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
           env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
           env.setStateBackend(new HashMapStateBackend());
           env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
           System.setProperty("HADOOP_USER_NAME", "atguigu");
        */
        // TODO 3 Read exam-topic data from Kafka
        String topicName = "dwd_test_exam";
        String groupId = "dws_course_exam_user_count_window";
        DataStreamSource<String> courseExamStream = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupId));
        // TODO 4 Parse to JSON and dedup via keyed state to get daily-unique exam users
        SingleOutputStreamOperator<JSONObject> jsonObjStream = courseExamStream.map(JSON::parseObject);

        // Anonymous KeySelector (rather than a lambda) so Flink can infer the
        // Tuple2 key type despite generic-type erasure.
        KeyedStream<JSONObject, Tuple2<String, String>> keyedStream =
                jsonObjStream.keyBy(new KeySelector<JSONObject, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> getKey(JSONObject value) throws Exception {
                        return new Tuple2<>(value.getString("course_id"), value.getString("user_id"));
                    }
                });

        SingleOutputStreamOperator<CourseExamUserCountBean> beanStream =
                keyedStream.flatMap(new RichFlatMapFunction<JSONObject, CourseExamUserCountBean>() {

                    // Last date (yyyy-MM-dd) this (course, user) pair submitted an exam;
                    // used to emit at most one count per pair per day.
                    ValueState<String> lastTestDtState = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        ValueStateDescriptor<String> lastTestDtDescriptor = new ValueStateDescriptor<>("last_test_dt",
                                String.class);
                        // 1-day TTL keeps state bounded; refreshed on create and write so
                        // the entry survives exactly one dedup cycle.
                        lastTestDtDescriptor.enableTimeToLive(StateTtlConfig
                                .newBuilder(Time.days(1L))
                                .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                                .build());
                        lastTestDtState = getRuntimeContext().getState(lastTestDtDescriptor);
                    }

                    @Override
                    public void flatMap(JSONObject value, Collector<CourseExamUserCountBean> out) throws Exception {
                        Long ts = DateFormatUtil.toTs(value.getString("submit_time"), true);
                        String courseId = value.getString("course_id");
                        String courseTestDt = DateFormatUtil.toDate(ts);
                        String lastTestDt = lastTestDtState.value();
                        // Emit a count of 1 only on the first submission of the day
                        // for this (course, user) pair. stt/edt are filled in later
                        // by the window function.
                        if (lastTestDt == null || !lastTestDt.equals(courseTestDt)) {
                            out.collect(new CourseExamUserCountBean("", "", courseId, 1L, ts));
                            lastTestDtState.update(courseTestDt);
                        }
                    }
                });

        // TODO 5 Assign watermarks (bounded out-of-orderness of 2 seconds)
        SingleOutputStreamOperator<CourseExamUserCountBean> beanWithWatermarkStream = beanStream
                .assignTimestampsAndWatermarks(WatermarkStrategy.<CourseExamUserCountBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<CourseExamUserCountBean>() {
                            @Override
                            public long extractTimestamp(CourseExamUserCountBean element, long recordTimestamp) {
                                return element.getTs();
                            }
                        }));

        // TODO 6 Window and aggregate
        // Fully-qualified Time here because the imported Time is
        // org.apache.flink.api.common.time.Time (used for the state TTL above).
        SingleOutputStreamOperator<CourseExamUserCountBean> reducedStream =
                beanWithWatermarkStream.windowAll(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10L)))
                        .reduce(new ReduceFunction<CourseExamUserCountBean>() {
                            @Override
                            public CourseExamUserCountBean reduce(CourseExamUserCountBean value1, CourseExamUserCountBean value2) throws Exception {
                                value1.setUserCount(value1.getUserCount() + value2.getUserCount());
                                return value1;
                            }
                        }, new AllWindowFunction<CourseExamUserCountBean, CourseExamUserCountBean, TimeWindow>() {
                            @Override
                            public void apply(TimeWindow window, Iterable<CourseExamUserCountBean> input,
                                              Collector<CourseExamUserCountBean> out) throws Exception {
                                // The reduce leaves exactly one pre-aggregated bean per window.
                                CourseExamUserCountBean courseExamUserCountBean = input.iterator().next();
                                courseExamUserCountBean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                                courseExamUserCountBean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                                // Overwrite event ts with processing time as the version column
                                // for ClickHouse (ReplacingMergeTree-style dedup downstream).
                                courseExamUserCountBean.setTs(System.currentTimeMillis());
                                out.collect(courseExamUserCountBean);
                            }
                        });
        // TODO 7 Write the result to ClickHouse
        reducedStream.addSink(ClickHouseUtil.getClickHouseSinkFunc("insert into dws_course_exam_user_count_window " +
                "values" +
                "(?,?,?,?,?)"));
        // TODO 8 Execute the job
        env.execute(groupId);
    }
}

