package com.atguigu.edu.realtime.app.dws;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.app.func.DimAsyncFunction;
import com.atguigu.edu.realtime.bean.CoursePaperBean;
import com.atguigu.edu.realtime.bean.TradeProvinceOrderBean;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.MyClickhouseUtil;
import com.atguigu.edu.realtime.util.MyKafkaUtil;
import com.atguigu.edu.realtime.util.TimestampLtz3CompareUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import ru.yandex.clickhouse.ClickHouseUtil;

import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;
// Author: Lu Shunxiang — DWS exam domain, paper-granularity score statistics table
public class DwsCourseTestExam {
    public static void main(String[] args) throws Exception {

        // TODO 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment
                .createLocalEnvironmentWithWebUI(new Configuration());
        env.setParallelism(4);

        // TODO 2. Checkpoint configuration (omitted)
        // TODO 3. Read exam records from the Kafka topic dwd_course_test_exam
        String topic = "dwd_course_test_exam";
        // NOTE(review): this consumer-group id looks copy-pasted from the
        // trade-province job — verify it will not collide with that job's offsets.
        String groupId = "dws_trade_province_order_window";

        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(topic, groupId);
        DataStreamSource<String> source = env.addSource(kafkaConsumer);

        // TODO 4. Parse raw JSON strings into JSONObject
        SingleOutputStreamOperator<JSONObject> mappedStream = source.map(JSON::parseObject);

        // TODO 5. Key by user_id so duplicates for the same user land on one subtask
        KeyedStream<JSONObject, String> keyedStream = mappedStream.keyBy(r -> r.getString("user_id"));

        // TODO 6. Deduplicate: keep only the record with the latest "ts" per key,
        // emitted 5s (processing time) after the first record arrives.
        SingleOutputStreamOperator<JSONObject> processedStream = keyedStream.process(
                new KeyedProcessFunction<String, JSONObject, JSONObject>() {

                    // Latest-known record for the current key; cleared after each emit.
                    private ValueState<JSONObject> lastValueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        lastValueState = getRuntimeContext().getState(
                                new ValueStateDescriptor<JSONObject>("last_value_state", JSONObject.class)
                        );
                    }

                    @Override
                    public void processElement(JSONObject jsonObj, Context ctx, Collector<JSONObject> out) throws Exception {
                        JSONObject lastValue = lastValueState.value();
                        if (lastValue == null) {
                            // First record for this key since the last emit:
                            // arm a 5s processing-time timer and remember it.
                            long currentProcessingTime = ctx.timerService().currentProcessingTime();
                            ctx.timerService().registerProcessingTimeTimer(currentProcessingTime + 5000L);
                            lastValueState.update(jsonObj);
                        } else {
                            // Replace the buffered record when the new one is
                            // at least as recent (ts comparison).
                            String lastRowOpTs = lastValue.getString("ts");
                            String rowOpTs = jsonObj.getString("ts");
                            if (TimestampLtz3CompareUtil.compare(lastRowOpTs, rowOpTs) <= 0) {
                                lastValueState.update(jsonObj);
                            }
                        }
                    }

                    @Override
                    public void onTimer(long timestamp, OnTimerContext ctx, Collector<JSONObject> out) throws IOException {
                        // Flush the winning record downstream and reset state.
                        JSONObject lastValue = this.lastValueState.value();
                        if (lastValue != null) {
                            out.collect(lastValue);
                        }
                        lastValueState.clear();
                    }
                }
        );

        // TODO 7. Convert to the CoursePaperBean POJO
        SingleOutputStreamOperator<CoursePaperBean> javaBeanStream = processedStream.map(
                jsonObj -> {
                    String paperId = jsonObj.getString("paper_id");
                    Double score = jsonObj.getDouble("score");
                    // Source ts is in seconds — convert to epoch millis for watermarks.
                    Long ts = jsonObj.getLong("ts") * 1000L;

                    // NOTE(review): orderIdSet is seeded with paper_id, which is also
                    // the grouping key — so after merging, the set always has size 1.
                    // This should probably hold a per-exam/per-user unique id; confirm
                    // against the DWD schema.
                    return CoursePaperBean.builder()
                            .paperId(paperId)
                            .orderCount(1L)
                            .orderIdSet(new HashSet<String>(
                                    Collections.singleton(paperId)
                            ))
                            .score(score)
                            .ts(ts)
                            .build();
                }
        );

        // TODO 8. Assign event-time watermarks (monotonous — assumes in-order ts)
        SingleOutputStreamOperator<CoursePaperBean> withWatermarkStream = javaBeanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<CoursePaperBean>forMonotonousTimestamps()
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<CoursePaperBean>() {
                                    @Override
                                    public long extractTimestamp(CoursePaperBean javaBean, long recordTimestamp) {
                                        return javaBean.getTs();
                                    }
                                }
                        )
        );

        // TODO 9. Key by paper id
        KeyedStream<CoursePaperBean, String> keyedByProIdStream =
                withWatermarkStream.keyBy(CoursePaperBean::getPaperId);

        // TODO 10. 10-second tumbling event-time windows
        WindowedStream<CoursePaperBean, String, TimeWindow> windowDS = keyedByProIdStream.window(TumblingEventTimeWindows.of(
                org.apache.flink.streaming.api.windowing.time.Time.seconds(10L)
        ));

        // TODO 11. Aggregate per window: sum counts and total score, union id sets
        SingleOutputStreamOperator<CoursePaperBean> reducedStream = windowDS.reduce(
                new ReduceFunction<CoursePaperBean>() {
                    @Override
                    public CoursePaperBean reduce(CoursePaperBean value1, CoursePaperBean value2) throws Exception {
                        value1.getOrderIdSet().addAll(
                                value2.getOrderIdSet()
                        );
                        value1.setOrderCount(
                                value1.getOrderCount() + value2.getOrderCount()
                        );
                        // FIX: accumulate score — previously value2's score was
                        // silently dropped, so a "score statistics" window only
                        // carried the first element's score.
                        value1.setScore(
                                value1.getScore() + value2.getScore()
                        );
                        return value1;
                    }
                },
                new ProcessWindowFunction<CoursePaperBean, CoursePaperBean, String, TimeWindow>() {
                    @Override
                    public void process(String s, Context context, Iterable<CoursePaperBean> elements, Collector<CoursePaperBean> out) throws Exception {
                        String stt = DateFormatUtil.toYmdHms(context.window().getStart());
                        String edt = DateFormatUtil.toYmdHms(context.window().getEnd());
                        for (CoursePaperBean element : elements) {
                            element.setStt(stt);
                            element.setEdt(edt);
                            // NOTE(review): this overwrites the summed count with
                            // the distinct-id count; given the set is keyed by
                            // paper_id it is currently always 1 — confirm intent.
                            element.setOrderCount((long) element.getOrderIdSet().size());
                            element.setTs(System.currentTimeMillis());
                            out.collect(element);
                        }
                    }
                }
        );
        reducedStream.print();

        // TODO 12. Province-dimension enrichment (disabled)
//        SingleOutputStreamOperator<CoursePaperBean> fullInfoStream = AsyncDataStream.unorderedWait(
//                reducedStream,
//                new DimAsyncFunction<CoursePaperBean>("dim_base_province".toUpperCase()) {
//
//                    @Override
//                    public void join(CoursePaperBean javaBean, JSONObject jsonObj) throws Exception {
//                        String provinceName = jsonObj.getString("name".toUpperCase());
//                        javaBean.setProvinceName(provinceName);
//                    }
//
//                    @Override
//                    public String getKey(CoursePaperBean javaBean) {
//                        return javaBean.getProvinceId();
//                    }
//                },
//                60 * 50, TimeUnit.SECONDS
//        );
//
////        fullInfoStream.print(">>>");
        // TODO 13. Sink to ClickHouse
        // NOTE(review): table name "dws_Course_exam_window" has mixed case —
        // verify it matches the DDL (ClickHouse identifiers are case-sensitive).
        SinkFunction<CoursePaperBean> jdbcSink = MyClickhouseUtil.<CoursePaperBean>getSinkFunction(
                "insert into dws_Course_exam_window values(?,?,?,?,?,?)"
        );
        reducedStream.<CoursePaperBean>addSink(jdbcSink);

        env.execute();
    }
}
