package com.atguigu.edu.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.bean.AnswerStationStatistics;
import com.atguigu.edu.bean.TestExamPaperQuestion;
import com.atguigu.edu.util.ClickHouseUtil;
import com.atguigu.edu.util.DateFormatUtil;
import com.atguigu.edu.util.KafkaUtil;
import com.sun.jersey.core.util.StringIgnoreCaseKeyComparator;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.text.DecimalFormat;
import java.time.Duration;
import java.util.HashMap;
import java.util.HashSet;

/**
 * DWS-layer Flink job: per-question answer statistics over 10-second tumbling
 * event-time windows.
 *
 * <p>Pipeline: Kafka topic {@code test_exam_question} → parse JSON into
 * {@link TestExamPaperQuestion} → event-time watermarks (2s out-of-orderness,
 * {@code ts} seconds → ms) → key by {@code questionId} → 10s tumbling window
 * computing answer counts, distinct-user counts, and accuracy ratios →
 * ClickHouse table {@code dws_answer_station_window}.
 */
public class DwsAnswerStationWindow {
    public static void main(String[] args) throws Exception {
        //todo 1 initialize the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        //todo 2 state backend / checkpointing (disabled in dev; enable for production)
        /*env.enableCheckpointing(5 * 1000L, CheckpointingMode.AT_LEAST_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3 * 60 *1000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME","atguigu");
        env.setStateBackend(new HashMapStateBackend());*/

        //todo 3 consume the source topic from Kafka
        String topicName = "test_exam_question";
        String groupId = "dws_answer_station_window";
        DataStreamSource<String> dataStreamSource = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupId));

        //TODO 4 parse each JSON record into a TestExamPaperQuestion bean
        SingleOutputStreamOperator<TestExamPaperQuestion> mapBeanStream = dataStreamSource.map(new MapFunction<String, TestExamPaperQuestion>() {
            @Override
            public TestExamPaperQuestion map(String value) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(value);
                TestExamPaperQuestion testExamPaperQuestion = new TestExamPaperQuestion();
                testExamPaperQuestion.setId(jsonObject.getString("id"));
                testExamPaperQuestion.setExamId(jsonObject.getString("exam_id"));
                testExamPaperQuestion.setPaperId(jsonObject.getString("paper_id"));
                testExamPaperQuestion.setQuestionId(jsonObject.getString("question_id"));
                testExamPaperQuestion.setUserId(jsonObject.getString("user_id"));
                testExamPaperQuestion.setIsCorrect(jsonObject.getString("is_correct"));
                testExamPaperQuestion.setScore(jsonObject.getDouble("score"));
                testExamPaperQuestion.setTs(jsonObject.getLong("ts"));
                // Every record contributes one answer; "1" in is_correct marks a correct one.
                testExamPaperQuestion.setAnsCount(1);
                testExamPaperQuestion.setCorrAnsCount(
                        "1".equals(jsonObject.getString("is_correct")) ? 1 : 0);
                return testExamPaperQuestion;
            }
        });

        //todo 5 assign event-time watermarks (ts is in seconds, Flink expects ms)
        SingleOutputStreamOperator<TestExamPaperQuestion> watermarkStream = mapBeanStream.assignTimestampsAndWatermarks(WatermarkStrategy
                .<TestExamPaperQuestion>forBoundedOutOfOrderness(Duration.ofSeconds(2L))
                .withTimestampAssigner(new SerializableTimestampAssigner<TestExamPaperQuestion>() {
                    @Override
                    public long extractTimestamp(TestExamPaperQuestion element, long recordTimestamp) {
                        return element.getTs() * 1000L;
                    }
                }));

        //todo 6 key by questionId, then aggregate per 10s tumbling window:
        // total answers, correct answers, distinct (correct) answering users, and ratios
        KeyedStream<TestExamPaperQuestion, String> keyByQuestionIdStream = watermarkStream
                .keyBy(TestExamPaperQuestion::getQuestionId);
        SingleOutputStreamOperator<AnswerStationStatistics> reduceStream = keyByQuestionIdStream
                .window(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .apply(new WindowFunction<TestExamPaperQuestion, AnswerStationStatistics, String, TimeWindow>() {
            @Override
            public void apply(String s, TimeWindow window, Iterable<TestExamPaperQuestion> input, Collector<AnswerStationStatistics> out) throws Exception {
                //total number of answers in the window
                int ansCount = 0;
                //number of correct answers in the window
                int corrAnsCount = 0;

                //distinct users who answered correctly
                HashSet<String> corrAnsUsers = new HashSet<>();
                //distinct users who answered at all
                HashSet<String> ansUsers = new HashSet<>();
                for (TestExamPaperQuestion testExamPaperQuestion : input) {
                    ansCount += testExamPaperQuestion.getAnsCount();
                    //accumulate distinct answering users
                    ansUsers.add(testExamPaperQuestion.getUserId());
                    //accumulate correct answers and distinct correctly-answering users
                    if ("1".equals(testExamPaperQuestion.getIsCorrect())) {
                        corrAnsUsers.add(testExamPaperQuestion.getUserId());
                        corrAnsCount += testExamPaperQuestion.getCorrAnsCount();
                    }
                }

                //ratios formatted to two decimal places; an event-time window only
                //fires with at least one element, but guard the division defensively
                DecimalFormat df = new DecimalFormat("0.00");
                String accuracy = df.format(ansCount == 0 ? 0d : (double) corrAnsCount / ansCount);
                int corrAnsUserCount = corrAnsUsers.size();
                int ansUserCount = ansUsers.size();
                String userCountAccuracy = df.format(ansUserCount == 0 ? 0d : (double) corrAnsUserCount / ansUserCount);

                AnswerStationStatistics answerStationStatistics = new AnswerStationStatistics();
                //answer accuracy (correct / total)
                answerStationStatistics.setAccuracy(accuracy);
                //total answer count
                answerStationStatistics.setAnsCount(ansCount);
                //correct answer count
                answerStationStatistics.setCorrAnsCount(corrAnsCount);
                //distinct correctly-answering user count
                answerStationStatistics.setCorrAnsUserCount(corrAnsUserCount);
                //distinct answering user count
                answerStationStatistics.setAnsUserCount(ansUserCount);
                //share of answering users who answered correctly
                answerStationStatistics.setUserCountAccuracy(userCountAccuracy);
                answerStationStatistics.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                answerStationStatistics.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                answerStationStatistics.setTs(System.currentTimeMillis());
                out.collect(answerStationStatistics);
            }
        });
        //todo 7 sink the aggregates into ClickHouse
        reduceStream.addSink(ClickHouseUtil.getClickHouseSinkFunc("insert into dws_answer_station_window values " +
                "(?,?,?,?,?,?,?,?,?)"));

        //todo 8 submit the job
        env.execute(groupId);

    }
}
