package com.atguigu.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.LearnExamPaperBean;
import com.atguigu.func.WindowFuncUtil;
import com.atguigu.utils.KafkaUtil;
import com.atguigu.utils.MyClickHouseUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.time.Duration;

/**
 * ClassName: LearnExamPaperWindow
 * Package: com.atguigu.app.dws
 * Description:
 *
 * @Author Lovxy
 * @Create 2023/5/18 12:09
 * @Version 1.0
 */
/**
 * DWS job: aggregates exam-paper detail events from Kafka into per-paper,
 * 10-second tumbling-window summaries (user count, total score, total duration)
 * and writes the result to ClickHouse table {@code dws_learn_exam_paper_window}.
 */
public class DwsLearnExamPaperWindow {
    public static void main(String[] args) throws Exception {
        //TODO 1. Obtain the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(
//                10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)
//        ));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        //TODO 2. Read from Kafka topic dwd_learn_exam_detail
        DataStreamSource<String> kafkaSource = env.fromSource(KafkaUtil.getKafkaSource("dwd_learn_exam_detail", "dws_paper_exam")
                , WatermarkStrategy.noWatermarks(), "kafka-source");

        //TODO 3. Convert each JSON record into a LearnExamPaperBean
        SingleOutputStreamOperator<LearnExamPaperBean> mapDs = kafkaSource.map(new MapFunction<String, LearnExamPaperBean>() {
            @Override
            public LearnExamPaperBean map(String value) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(value);
                String paperId = jsonObject.getString("paper_id");
                String paperTitle = jsonObject.getString("paper_title");
                BigDecimal score = jsonObject.getBigDecimal("score");
                Long durationSec = jsonObject.getLong("duration_sec");
                Long createTime = jsonObject.getLong("create_time");
                // Each detail record represents a single user's exam attempt,
                // so userCount is seeded with 1 and summed during aggregation.
                return LearnExamPaperBean.builder()
                        .paperId(paperId)
                        .paperTitle(paperTitle)
                        .totalScore(score)
                        .totalTime(durationSec)
                        .ts(createTime)
                        .userCount(1L)
                        .build();
            }
        });

        //TODO 4. Extract the event timestamp and generate watermarks
        // Bounded out-of-orderness of 2 seconds; ts comes from create_time
        // (NOTE(review): assumed to be epoch milliseconds — confirm upstream format).
        SingleOutputStreamOperator<LearnExamPaperBean> mapDsWithWm = mapDs.assignTimestampsAndWatermarks(WatermarkStrategy.<LearnExamPaperBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                .withTimestampAssigner(new SerializableTimestampAssigner<LearnExamPaperBean>() {
                    @Override
                    public long extractTimestamp(LearnExamPaperBean element, long recordTimestamp) {
                        return element.getTs();
                    }
                }));

        //TODO 5. Key by paper id, open a 10s tumbling event-time window, and aggregate
        SingleOutputStreamOperator<LearnExamPaperBean> resultDs = mapDsWithWm.keyBy(LearnExamPaperBean::getPaperId)
                .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .reduce(new ReduceFunction<LearnExamPaperBean>() {
                    @Override
                    public LearnExamPaperBean reduce(LearnExamPaperBean value1, LearnExamPaperBean value2) throws Exception {
                        // BUG FIX: userCount was previously accumulated twice per merge,
                        // inflating the per-window user count. Each metric is now added once.
                        value1.setUserCount(value1.getUserCount() + value2.getUserCount());
                        value1.setTotalScore(value1.getTotalScore().add(value2.getTotalScore()));
                        value1.setTotalTime(value1.getTotalTime() + value2.getTotalTime());
                        return value1;
                    }
                }, new WindowFunction<LearnExamPaperBean, LearnExamPaperBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<LearnExamPaperBean> input, Collector<LearnExamPaperBean> out) throws Exception {
                        // Stamps window start/end (and related fields) onto the
                        // aggregated bean before emitting it downstream.
                        WindowFuncUtil.setTes(window, input, out);
                    }
                });
        resultDs.print("resultDs>>>>");

        //TODO 6. Sink the windowed result to ClickHouse
        resultDs.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_learn_exam_paper_window values(?,?,?,?,?,?,?,?)" ));

        env.execute();
    }
}
