package com.atguigu.realtime.app.dws;


import com.alibaba.fastjson.JSONObject;
import com.atguigu.realtime.bean.TestExamCourseBean;
import com.atguigu.realtime.utils.*;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Collections;
import java.util.HashSet;


/**
 * DWS-layer Flink job: consumes exam records from the Kafka topic
 * {@code dwd_test_exam}, aggregates them per paper in 10-second tumbling
 * event-time windows, and writes the window summaries into the ClickHouse
 * table {@code dws_test_exam_course_count_window}.
 *
 * <p>Per window/paper the job produces: total score, total duration,
 * distinct-user count, average duration per user, and average score per user.
 */
public class DwsTestExamCourseWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1 Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        //StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 State backend / checkpointing (disabled for local testing)
        //env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        //env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        //env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        //env.setStateBackend(new HashMapStateBackend());
        //env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        //System.setProperty("HADOOP_USER_NAME", "atguigu");

        //todo 3 Read the dwd_test_exam topic from Kafka
        String topicName = "dwd_test_exam";
        String groupID = "dws_test_exam_course_count_window";
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupID));

        //todo 4 Convert each JSON record into a TestExamCourseBean
        SingleOutputStreamOperator<TestExamCourseBean> mapStream = streamSource.map(new MapFunction<String, TestExamCourseBean>() {
            @Override
            public TestExamCourseBean map(String value) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(value);

                // uidSet seeds the per-record distinct-user set so the window
                // reduce can union sets to count distinct users per paper.
                return TestExamCourseBean.builder()
                        .paperId(jsonObject.getString("paper_id"))
                        .userId(jsonObject.getString("user_id"))
                        .score(Double.parseDouble(jsonObject.getString("score")))
                        .durationSec(Long.parseLong(jsonObject.getString("duration_sec")))
                        .ts(DateFormatUtil.toTs(jsonObject.getString("create_time"), true))
                        .uidSet(new HashSet<>(Collections.singleton(jsonObject.getString("user_id"))))
                        .build();
            }
        });

        //todo 5 Windowed aggregation
        // Watermarks: bounded out-of-orderness of 2 seconds, event time from ts
        SingleOutputStreamOperator<TestExamCourseBean> watermarks = mapStream.assignTimestampsAndWatermarks(WatermarkStrategy.
                <TestExamCourseBean>forBoundedOutOfOrderness(Duration.ofSeconds(2L)).withTimestampAssigner(new SerializableTimestampAssigner<TestExamCourseBean>() {
            @Override
            public long extractTimestamp(TestExamCourseBean element, long recordTimestamp) {
                return element.getTs();
            }
        }));
        // Key by paper id
        KeyedStream<TestExamCourseBean, String> keyedStream = watermarks.keyBy(new KeySelector<TestExamCourseBean, String>() {
            @Override
            public String getKey(TestExamCourseBean value) throws Exception {
                return value.getPaperId();
            }
        });
        // 10-second tumbling event-time window
        WindowedStream<TestExamCourseBean, String, TimeWindow> windowedStream = keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(10L)));
        // Incremental reduce (sums + user-set union) plus a window function
        // that stamps window bounds and derives the per-user averages.
        SingleOutputStreamOperator<TestExamCourseBean> reduceStream = windowedStream.reduce(new ReduceFunction<TestExamCourseBean>() {
            @Override
            public TestExamCourseBean reduce(TestExamCourseBean value1, TestExamCourseBean value2) throws Exception {
                value1.setScore(value1.getScore() + value2.getScore());
                value1.setDurationSec(value1.getDurationSec() + value2.getDurationSec());
                value1.getUidSet().addAll(value2.getUidSet());
                return value1;
            }
        }, new WindowFunction<TestExamCourseBean, TestExamCourseBean, String, TimeWindow>() {
            @Override
            public void apply(String s, TimeWindow window, Iterable<TestExamCourseBean> input, Collector<TestExamCourseBean> out) throws Exception {
                TestExamCourseBean bean = input.iterator().next();
                bean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                bean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                bean.setTs(System.currentTimeMillis());
                // number = distinct users in the window; always >= 1 here
                // because the window fired with at least one record.
                bean.setNumber(bean.getUidSet().size());
                // FIX: cast before dividing — the previous
                // (double) (durationSec / number) truncated to a whole number
                // via long/int integer division before the cast.
                bean.setAvgSec((double) bean.getDurationSec() / bean.getNumber());
                bean.setAvgScore(bean.getScore() / bean.getNumber());
                out.collect(bean);
            }
        });

        //todo 6 Join with DIM layer (not yet enabled)
       /* SingleOutputStreamOperator<TestExamCourseBean> resultStream = mapStream.map(new RichMapFunction<TestExamCourseBean, TestExamCourseBean>() {
            DruidDataSource dataSource = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                dataSource = DruidPhoenixDSUtil.getDataSource();
            }

            @Override
            public TestExamCourseBean map(TestExamCourseBean value) throws Exception {
                //dim_test_paper
                List<JSONObject> testPaper = DimUtil.getDimInfo(dataSource.getConnection(), "DIM_TEST_PAPER", value.getPaperId());
                if (testPaper.size() > 0) {
                    JSONObject tp = testPaper.get(0);
                    value.setCourseId(tp.getString("courseId"));

                }

                //dim_course_info
                List<JSONObject> courseInfo = DimUtil.getDimInfo(dataSource.getConnection(), "DIM_COURSE_INFO", value.getCourseId());
                if (courseInfo.size()>0){
                    JSONObject jsonObject = courseInfo.get(0);
                    value.setCourseName(jsonObject.getString("courseName"));
                }

                return value;

            }
        });*/

//        resultStream.print();
        reduceStream.print();
        // Sink the window summaries to ClickHouse
        reduceStream.addSink(ClickHouseUtil.getClickHouseSinkFunc("insert into dws_test_exam_course_count_window values(?,?,?,?,?,?,?,?)"));

        // Launch the job
        env.execute();
    }
}
