package com.atguigu.edu.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.bean.TestPaperExam;
import com.atguigu.edu.bean.avgTestPaperCourse;
import com.atguigu.edu.util.ClickHouseUtil;
import com.atguigu.edu.util.DateFormatUtil;
import com.atguigu.edu.util.KafkaUtil;

import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.text.DecimalFormat;
import java.time.Duration;

/**
 * DWS-layer Flink job: per-paper exam summary.
 *
 * <p>Reads exam records from the Kafka topic {@code dwd_test_exam_paper},
 * keys the stream by paper id, aggregates average score, average duration
 * and examinee count over 10-second tumbling event-time windows, and writes
 * each window result to ClickHouse.
 */
public class DwsTestPaperExamWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1: initialize the streaming environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2: state backend / checkpointing — intentionally disabled for
        // local runs; re-enable before deploying to the cluster.
        /*env.enableCheckpointing(5 * 1000L, CheckpointingMode.AT_LEAST_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3 * 60 *1000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME","atguigu");
        env.setStateBackend(new HashMapStateBackend());*/

        // TODO 3: consume the source topic from Kafka
        String topicName = "dwd_test_exam_paper";
        String groupId = "dwd_test_exam_paper_window";
        DataStreamSource<String> dataStreamSource = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupId));

        // TODO 4: parse each JSON record into a TestPaperExam bean
        SingleOutputStreamOperator<TestPaperExam> mapBeanStream = dataStreamSource.map(new MapFunction<String, TestPaperExam>() {
            @Override
            public TestPaperExam map(String value) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(value);
                return new TestPaperExam("", "", jsonObject.getString("id"), jsonObject.getString("paper_id"),
                        jsonObject.getString("user_id"), jsonObject.getDouble("score"), jsonObject.getLong("duration_sec"),
                        // each record represents exactly one exam attempt
                        1L, "", "", jsonObject.getLong("ts"));
            }
        });

        // TODO 5: assign event-time watermarks (2s bounded out-of-orderness),
        // then key the stream by paper id
        KeyedStream<TestPaperExam, String> watermarkStream = mapBeanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<TestPaperExam>forBoundedOutOfOrderness(Duration.ofSeconds(2L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<TestPaperExam>() {
                            @Override
                            public long extractTimestamp(TestPaperExam element, long recordTimestamp) {
                                // source ts is in seconds; Flink expects epoch millis
                                return element.getTs() * 1000;
                            }
                        }))
                .keyBy(new KeySelector<TestPaperExam, String>() {
                    @Override
                    public String getKey(TestPaperExam value) throws Exception {
                        return value.getPaperId();
                    }
                });

        // TODO 6: window + aggregate.
        // BUGFIX: the original used TumblingProcessingTimeWindows, which silently
        // ignores the watermarks/timestamps assigned above. Event-time windows
        // are what the watermark setup in step 5 intends.
        SingleOutputStreamOperator<avgTestPaperCourse> applyStream = watermarkStream
                .window(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .apply(new WindowFunction<TestPaperExam, avgTestPaperCourse, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<TestPaperExam> input, Collector<avgTestPaperCourse> out) throws Exception {
                        // running totals for the window
                        Double sumScore = 0.0;        // sum of exam scores
                        Double sumDurationSec = 0.0;  // sum of exam durations (seconds)
                        long userCount = 0L;          // number of exam attempts

                        for (TestPaperExam testPaperExam : input) {
                            sumScore += testPaperExam.getScore();
                            sumDurationSec += testPaperExam.getDurationSec();
                            userCount += testPaperExam.getUserCount();
                        }

                        // A fired window always contains at least one record, so
                        // userCount >= 1 and the divisions below are safe.
                        // NOTE(review): pattern "#.00" renders values < 1 as ".50";
                        // switch to "0.00" if a leading zero is required downstream.
                        DecimalFormat df = new DecimalFormat("#.00");
                        String avgScore = df.format(sumScore / userCount);
                        String avgDurationSec = df.format(sumDurationSec / userCount);

                        avgTestPaperCourse avgTestPaper = new avgTestPaperCourse();
                        avgTestPaper.setAvgScore(avgScore);
                        avgTestPaper.setAvgDurationSec(avgDurationSec);
                        avgTestPaper.setUserCount(userCount);
                        // the grouping key IS the paper id — no need to peek at the
                        // first element of the window
                        avgTestPaper.setPaperId(s);
                        avgTestPaper.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        avgTestPaper.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        avgTestPaper.setTs(System.currentTimeMillis());
                        out.collect(avgTestPaper);
                    }
                });
        //applyStream.print("apply>>>>>>>>>");

        // TODO 7: sink to ClickHouse.
        // NOTE(review): 8 placeholders but only 7 fields are set above — verify the
        // avgTestPaperCourse bean's field count matches the target table. Also, a
        // DWS-layer job writing to a "dwd_"-prefixed table looks like a naming slip;
        // confirm the ClickHouse table name.
        applyStream.addSink(ClickHouseUtil.getClickHouseSinkFunc("insert into dwd_test_exam_paper_window values(?,?,?,?,?,?,?,?)"));

        // TODO 8: run the job
        env.execute();
    }
}
