package com.atguigu.edu.realtime.app.dws.course;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.bean.TradeCourseOrderBean;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.KafkaUtil;
import com.atguigu.edu.realtime.util.TimestampLtz3CompareUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Collections;
import java.util.HashSet;

/**
 * DWS job: per-course trade-order summary window.
 *
 * <p>Reads order-detail records from the Kafka DWD topic
 * {@code dwd_trade_order_detail}, deduplicates retractions caused by the
 * upstream left-outer join (keeping the record with the latest
 * {@code row_op_ts}), flags per-day unique ordering users per
 * (course, user) pair, then aggregates order count, unique-user count and
 * amounts per course in 10-second tumbling event-time windows.
 */
public class DwsTradeCourseOrderWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1. Basic environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);
        // TODO 2. Checkpoint configuration (omitted here)
        // TODO 3. Read data from the Kafka topic
        // 3.1 Declare the source topic and the consumer group
        String topic = "dwd_trade_order_detail";
        String groupId = "dws_trade_course_order_group";
        // 3.2 Create the consumer
        FlinkKafkaConsumer<String> kafkaConsumer = KafkaUtil.getKafkaConsumer(topic, groupId);
        // 3.3 Consume the data and wrap it into a stream
        DataStreamSource<String> kafkaDS = env.addSource(kafkaConsumer);

        // TODO 4. Convert types: jsonStr -> jsonObj
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.map(str -> JSON.parseObject(str));

        //jsonObjDS.print();
        // Sample record:
        // {"course_id":"263","coupon_reduce":"0.0","od_ts":"1662033303",
        // "create_time":"2022-09-01 19:55:04","course_name":"Docker核心技术",
        // "expire_time":"2022-09-01 20:10:04",
        // "session_id":"cf072f33-deca-42ac-9d21-ef8df1e312f9",
        // "oi_ts":"1662033303","date_id":"2022-09-01","expire_time_date":"2022-09-01",
        // "origin_amount":"200.0","user_id":"782","province_id":"29",
        // "row_op_ts":"2022-09-01 11:55:03.833Z",
        // "final_amount":"200.0","id":"33575","order_id":"31136"}

        // TODO 5. Key by order-detail id so that duplicates produced by the
        //  upstream left-outer join land on the same subtask
        KeyedStream<JSONObject, String> orderDetailIdKeyedDs = jsonObjDS.keyBy(jsonObj -> jsonObj.getString("id"));

        // TODO 6. Deduplicate with Flink state + a processing-time timer:
        //  buffer the latest version of each id for 5 s, then emit it once.
        SingleOutputStreamOperator<JSONObject> distinctDs = orderDetailIdKeyedDs.process(
                new KeyedProcessFunction<String, JSONObject, JSONObject>() {
                    // Latest record seen for the current order-detail id.
                    private ValueState<JSONObject> lastJsonObjState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        lastJsonObjState
                                = getRuntimeContext().getState(new ValueStateDescriptor<JSONObject>("lastJsonObjState", JSONObject.class));
                    }

                    @Override
                    public void processElement(JSONObject jsonObj, Context ctx, Collector<JSONObject> out) throws Exception {
                        JSONObject lastJSONObj = lastJsonObjState.value();
                        if (lastJSONObj == null) {
                            // First record for this id: buffer it and schedule
                            // a timer 5 s out to flush whichever version wins.
                            lastJsonObjState.update(jsonObj);
                            long currentProcessTime = ctx.timerService().currentProcessingTime();
                            ctx.timerService().registerProcessingTimeTimer(currentProcessTime + 5000L);
                        } else {
                            // Already holding a record for this id: keep the one
                            // that entered Kafka later, comparing the row_op_ts
                            // field written by the DWD layer.
                            String lastRowOpTs = lastJSONObj.getString("row_op_ts");
                            String rowOpTs = jsonObj.getString("row_op_ts");
                            if (TimestampLtz3CompareUtil.compare(lastRowOpTs, rowOpTs) <= 0) {
                                lastJsonObjState.update(jsonObj);
                            }
                        }
                    }

                    @Override
                    public void onTimer(long timestamp, OnTimerContext ctx, Collector<JSONObject> out) throws Exception {
                        // Flush the surviving record and clear state so late
                        // arrivals for the same id start a new cycle.
                        JSONObject lastJsonObj = lastJsonObjState.value();
                        if (lastJsonObj != null) {
                            out.collect(lastJsonObj);
                        }
                        lastJsonObjState.clear();
                    }
                }
        );

        // TODO 7. Convert again: JSONObject -> statistics bean
        SingleOutputStreamOperator<TradeCourseOrderBean> courseOrderBeanDs = distinctDs.map(
                new MapFunction<JSONObject, TradeCourseOrderBean>() {
                    @Override
                    public TradeCourseOrderBean map(JSONObject jsonObj) throws Exception {
                        String userId = jsonObj.getString("user_id");
                        String courseId = jsonObj.getString("course_id");
                        String courseName = jsonObj.getString("course_name");
                        String orderId = jsonObj.getString("order_id");
                        // Default missing amounts to 0.0 — a null here would
                        // NPE later when the windowed reduce unboxes and sums.
                        Double originAmount = jsonObj.getDouble("origin_amount");
                        Double couponReduce = jsonObj.getDouble("coupon_reduce");
                        Double finalAmount = jsonObj.getDouble("final_amount");

                        // od_ts is in seconds; Flink timestamps are millis.
                        Long ts = jsonObj.getLong("od_ts") * 1000L;
                        TradeCourseOrderBean courseOrderBean = TradeCourseOrderBean.builder()
                                .userId(userId)
                                .courseId(courseId)
                                .courseName(courseName)
                                .orderId(orderId)
                                .originAmount(originAmount == null ? 0.0 : originAmount)
                                .couponReduce(couponReduce == null ? 0.0 : couponReduce)
                                .finalAmount(finalAmount == null ? 0.0 : finalAmount)
                                .orderIdSet(new HashSet<>(
                                        Collections.singleton(orderId)
                                ))
                                .orderUuCount(0L)
                                .ts(ts)
                                .build();

                        return courseOrderBean;
                    }
                }
        );

        // TODO 8. Assign watermarks and extract the event-time field
        SingleOutputStreamOperator<TradeCourseOrderBean> withWaterMarkDS = courseOrderBeanDs.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<TradeCourseOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<TradeCourseOrderBean>() {
                                    @Override
                                    public long extractTimestamp(TradeCourseOrderBean element, long recordTimestamp) {
                                        return element.getTs();
                                    }
                                }
                        )
        );

        // TODO 9. Key by course id + user id to track unique ordering users per course
        KeyedStream<TradeCourseOrderBean, String> courseIdAndUserIdKeyedDS = withWaterMarkDS.keyBy(courseBean -> courseBean.getCourseId() + courseBean.getUserId());

        // TODO 10. Flag whether this record represents a unique ordering user
        //  for its course on the record's date.
        SingleOutputStreamOperator<TradeCourseOrderBean> uUCourseOrderDS = courseIdAndUserIdKeyedDS.process(
                new KeyedProcessFunction<String, TradeCourseOrderBean, TradeCourseOrderBean>() {
                    // Last date (yyyy-MM-dd) on which this (course, user) key
                    // ordered. Descriptor name "booleanState" is kept as-is
                    // for savepoint/state compatibility.
                    ValueState<String> booleanValueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        booleanValueState = getRuntimeContext().getState(new ValueStateDescriptor<>("booleanState", String.class));
                    }

                    @Override
                    public void processElement(TradeCourseOrderBean tradeCourseOrderBean, Context ctx, Collector<TradeCourseOrderBean> out) throws Exception {
                        String lastDate = booleanValueState.value();
                        String curDate = DateFormatUtil.toDateString(tradeCourseOrderBean.getTs());
                        // First record of the day (or ever) for this key:
                        // update state and mark the record as a unique user.
                        if (StringUtils.isEmpty(lastDate) || !lastDate.equals(curDate)) {
                            booleanValueState.update(curDate);
                            tradeCourseOrderBean.setOrderUuCount(1L);
                        }
                        out.collect(tradeCourseOrderBean);
                    }
                }
        );

        // TODO 11. Key by course id for the windowed aggregation
        KeyedStream<TradeCourseOrderBean, String> courseIdKeyedDS = uUCourseOrderDS.keyBy(courseOrderBean -> courseOrderBean.getCourseId());

        // TODO 12. Open a 10-second tumbling event-time window
        WindowedStream<TradeCourseOrderBean, String, TimeWindow> windowDs = courseIdKeyedDS.window(TumblingEventTimeWindows.of(Time.seconds(10)));

        // TODO 13. Aggregate: incremental reduce + window function for metadata
        SingleOutputStreamOperator<TradeCourseOrderBean> reduceDs = windowDs.reduce(
                new ReduceFunction<TradeCourseOrderBean>() {
                    @Override
                    public TradeCourseOrderBean reduce(TradeCourseOrderBean value1, TradeCourseOrderBean value2) throws Exception {
                        // Merge order ids so the window function can derive the
                        // distinct order count from the set size.
                        value1.getOrderIdSet().addAll(value2.getOrderIdSet());
                        value1.setOrderUuCount(value1.getOrderUuCount() + value2.getOrderUuCount());
                        value1.setOriginAmount(value1.getOriginAmount() + value2.getOriginAmount());
                        value1.setCouponReduce(value1.getCouponReduce() + value2.getCouponReduce());
                        value1.setFinalAmount(value1.getFinalAmount() + value2.getFinalAmount());
                        return value1;
                    }
                },
                new WindowFunction<TradeCourseOrderBean, TradeCourseOrderBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<TradeCourseOrderBean> input, Collector<TradeCourseOrderBean> out) throws Exception {
                        // Stamp window boundaries and finalize derived metrics.
                        String stt = DateFormatUtil.toDateTimeString(window.getStart());
                        String edt = DateFormatUtil.toDateTimeString(window.getEnd());

                        for (TradeCourseOrderBean tradeCourseOrderBean : input) {
                            tradeCourseOrderBean.setStt(stt);
                            tradeCourseOrderBean.setEdt(edt);
                            tradeCourseOrderBean.setTs(System.currentTimeMillis());
                            tradeCourseOrderBean.setOrderCount((long) tradeCourseOrderBean.getOrderIdSet().size());
                            out.collect(tradeCourseOrderBean);
                        }
                    }
                }
        );

        reduceDs.print();
        env.execute();
    }

}
