package com.atguigu.edu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.app.func.DimAsyncFunction;
import com.atguigu.edu.bean.TestPaperExam;
import com.atguigu.edu.util.ClickHouseUtil;
import com.atguigu.edu.util.DateFormatUtil;
import com.atguigu.edu.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.junit.Test;

import java.time.Duration;

/**
 * DWS job: counts, per 10-second event-time tumbling window, how many users'
 * exam papers fall into each score band (A/B/C/D), and writes the aggregated
 * rows to ClickHouse table {@code dws_fractional_test_paper_window}.
 *
 * Pipeline: Kafka (dwd_test_exam_paper) -> parse JSON to TestPaperExam
 * -> watermark (2 s bounded out-of-orderness) -> keyBy point level
 * -> 10 s tumbling window reduce -> ClickHouse sink.
 */
public class DwsFractionalTestPaperWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1: initialize the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2: checkpointing / state backend — kept disabled for local development.
        /*env.enableCheckpointing(5 * 1000L, CheckpointingMode.AT_LEAST_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3 * 60 *1000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME","atguigu");
        env.setStateBackend(new HashMapStateBackend());*/

        // TODO 3: consume the exam-paper fact records from Kafka.
        String topicName = "dwd_test_exam_paper";
        String groupId = "dws_fractional_test_paper_window";
        DataStreamSource<String> dataStreamSource = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupId));

        // TODO 4: parse each JSON record into a TestPaperExam bean tagged with its score band.
        SingleOutputStreamOperator<TestPaperExam> paperStream = dataStreamSource.map(new MapFunction<String, TestPaperExam>() {
            @Override
            public TestPaperExam map(String value) throws Exception {
                return parseTestPaperExam(value);
            }
        });

        // Assign event-time watermarks (2 s bounded out-of-orderness); `ts` is in
        // seconds and is scaled to epoch milliseconds for Flink.
        KeyedStream<TestPaperExam, String> keyByStream = paperStream.keyBy(TestPaperExam::getPaperId);
        SingleOutputStreamOperator<TestPaperExam> watermarkStream = keyByStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<TestPaperExam>forBoundedOutOfOrderness(Duration.ofSeconds(2L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<TestPaperExam>() {
                            @Override
                            public long extractTimestamp(TestPaperExam element, long recordTimestamp) {
                                return element.getTs() * 1000L;
                            }
                        }));

        // TODO 5: re-key by score band and count users per 10-second tumbling window.
        KeyedStream<TestPaperExam, String> pointLevelStream = watermarkStream.keyBy(new KeySelector<TestPaperExam, String>() {
            @Override
            public String getKey(TestPaperExam value) throws Exception {
                return value.getPointLevel();
            }
        });
        pointLevelStream.print("pointLevel>>>>>>");
        SingleOutputStreamOperator<TestPaperExam> reduceStream = pointLevelStream
                .window(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .reduce(new ReduceFunction<TestPaperExam>() {
                    @Override
                    public TestPaperExam reduce(TestPaperExam value1, TestPaperExam value2) throws Exception {
                        // Sum the per-record userCount (seeded with 1 by the parser).
                        value1.setUserCount(value1.getUserCount() + value2.getUserCount());
                        return value1;
                    }
                }, new WindowFunction<TestPaperExam, TestPaperExam, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<TestPaperExam> input, Collector<TestPaperExam> out) throws Exception {
                        // Stamp window bounds on the aggregate and blank out the
                        // per-record fields that are meaningless on an aggregated row.
                        TestPaperExam next = input.iterator().next();
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        next.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        next.setTs(System.currentTimeMillis());
                        next.setUserId("");
                        next.setId("");
                        next.setDurationSec(0L);
                        next.setScore(0.00);
                        out.collect(next);
                    }
                });
        reduceStream.print("reduce>>>>>>>>");

        // TODO 6: sink aggregated rows to ClickHouse.
        reduceStream.addSink(ClickHouseUtil.getClickHouseSinkFunc("insert into dws_fractional_test_paper_window values(?,?,?,?,?,?,?,?,?,?,?)"));

        // TODO 7: launch the job.
        env.execute(groupId);

    }

    /**
     * Parses one dwd_test_exam_paper JSON record into a {@link TestPaperExam} bean.
     * userCount is seeded with 1 so the window reduce can simply sum it.
     *
     * NOTE(review): assumes "score" and "ts" are always present in the record —
     * a missing field would unbox null and fail the task with an NPE; confirm the
     * upstream dwd layer guarantees these fields before hardening.
     */
    private static TestPaperExam parseTestPaperExam(String value) {
        JSONObject jsonObject = JSONObject.parseObject(value);
        TestPaperExam testPaperExam = new TestPaperExam();
        testPaperExam.setId(jsonObject.getString("id"));
        testPaperExam.setPaperId(jsonObject.getString("paper_id"));
        testPaperExam.setUserId(jsonObject.getString("user_id"));
        testPaperExam.setScore(jsonObject.getDouble("score"));
        testPaperExam.setDurationSec(jsonObject.getLong("duration_sec"));
        testPaperExam.setUserCount(1L);
        testPaperExam.setTs(jsonObject.getLong("ts"));
        testPaperExam.setPointLevel(getPointLevel(testPaperExam.getScore()));
        return testPaperExam;
    }

    /**
     * Maps a score to its grade band: [80, +inf) -> "A", [60, 80) -> "B",
     * [40, 60) -> "C", (-inf, 40) -> "D".
     */
    private static String getPointLevel(double score) {
        if (score < 40) {
            return "D";
        }
        if (score < 60) {
            return "C";
        }
        if (score < 80) {
            return "B";
        }
        return "A";
    }
}
