package com.atguigu.edu.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.app.func.BeanToJsonStrFunction;
import com.atguigu.edu.realtime.app.func.DimAsyncFunction;
import com.atguigu.edu.realtime.bean.TradeProvinceOrderBean;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.DorisUtil;
import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.math.BigDecimal;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
 * ClassName: DwsTradeProvinceOrderWindow
 * Package: com.atguigu.edu.realtime.app.dws
 * Description:
 *
 * @Author Mr.2
 * @Create 2023/9/12 8:58
 * @Version 1.0
 */
/**
 * DWS job: province-level trade order aggregation.
 *
 * <p>Pipeline: Kafka (dwd_trade_order_detail) -> filter/parse -> de-duplicate by
 * order-detail id (state + retraction) -> map to {@link TradeProvinceOrderBean} ->
 * event-time tumbling window (10s) keyed by province id -> reduce -> async join of
 * province name from dim_base_province -> Doris sink (dws_trade_province_order_window).
 */
public class DwsTradeProvinceOrderWindow {
    public static void main(String[] args) {
        // TODO 1. Basic environment setup
        // 1.1 Create the streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Set parallelism
        env.setParallelism(4);

        // TODO 2. Checkpoint settings
        // 2.1 Enable checkpointing (exactly-once, every 100s)
        env.enableCheckpointing(100000L, CheckpointingMode.EXACTLY_ONCE);
        // 2.2 Checkpoint timeout
//        env.getCheckpointConfig().setCheckpointTimeout(3000L);
        // 2.3 Retain externalized checkpoints after job cancellation
//        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.4 Restart strategy -- failure-rate based
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30L), Time.seconds(3)));
        // 2.5 Minimum pause between two checkpoints
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // 2.6 State backend, i.e. where checkpoints are stored
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/edu0909/checkpoint");
        // 2.7 OS user for HDFS access
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // TODO 3. Read data from the Kafka topic
        // 3.1 Declare consumer topic and consumer group
        String topic = "dwd_trade_order_detail";
        String groupId = "dwd_trade_province_order_window_group_01";
        // 3.2 Create the consumer source
        KafkaSource<String> kafkaSource = MyKafkaUtil.getKafkaSource(topic, groupId);
        // 3.3 Wrap consumed data as a stream (watermarks are assigned later, after parsing)
        DataStreamSource<String> kafkaStrStream =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source_01");

        // TODO 4. Drop empty messages and convert JSON strings to JSONObject
        SingleOutputStreamOperator<JSONObject> jsonObjStream = kafkaStrStream.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String jsonString, Context ctx, Collector<JSONObject> out) throws Exception {
                        // Upstream may emit null/empty tombstone messages; skip them.
                        if (StringUtils.isNotEmpty(jsonString)) {
                            JSONObject jsonObject = JSON.parseObject(jsonString);
                            out.collect(jsonObject);
                        }
                    }
                }
        );

        // TODO 5. Key by the unique key (order-detail id)
        KeyedStream<JSONObject, String> orderDetailIdKeyedByStream = jsonObjStream.keyBy(
                new KeySelector<JSONObject, String>() {
                    @Override
                    public String getKey(JSONObject jsonObj) throws Exception {
                        return jsonObj.getString("id");
                    }
                }
        );
        // For test output -> yes
//        orderDetailIdKeyedByStream.print(">>>>");

        // TODO 6. De-duplicate
        // 6.1 Option 1: state + timer (emit only the final record per key)
        // 6.2 Option 2: state + retraction (used here: emit every record immediately for
        //     low latency; when a duplicate arrives, first emit the previous record with
        //     its measures negated so downstream sums cancel out)
        SingleOutputStreamOperator<JSONObject> distinctStream = orderDetailIdKeyedByStream.process(
                new KeyedProcessFunction<String, JSONObject, JSONObject>() {
                    // Holds the last record seen for this order-detail id.
                    private ValueState<JSONObject> lastJsonObjState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        ValueStateDescriptor<JSONObject> valueStateDescriptor =
                                new ValueStateDescriptor<>("lastJsonObjState", JSONObject.class);
                        // TTL bounds state size: duplicates are expected to arrive within 10s.
                        valueStateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(Time.seconds(10L)).build());
                        lastJsonObjState = getRuntimeContext().getState(valueStateDescriptor);
                    }

                    @Override
                    public void processElement(JSONObject jsonObj, Context ctx, Collector<JSONObject> out) throws Exception {
                        JSONObject lastJsonObj = lastJsonObjState.value();
                        // A previous record for this key exists -> retract it by negating
                        // the measure fields that feed downstream aggregation
                        // (order count is deduplicated by the order-id Set, so only
                        // final_amount needs negation here).
                        if (lastJsonObj != null) {
                            // Use BigDecimal.negate() instead of prefixing "-" to the
                            // string: string prefixing would produce an unparsable "--x"
                            // if the amount were already negative (e.g. refund rows).
                            BigDecimal lastAmount = new BigDecimal(lastJsonObj.getString("final_amount"));
                            lastJsonObj.put("final_amount", lastAmount.negate().toPlainString());
                            out.collect(lastJsonObj);
                        }
                        // Always forward the current record immediately for timeliness.
                        out.collect(jsonObj);
                        lastJsonObjState.update(jsonObj);
                    }
                }
        );

        // TODO 7. Convert again: JSONObject -> TradeProvinceOrderBean
        SingleOutputStreamOperator<TradeProvinceOrderBean> orderBeanStream = distinctStream.map(
                new MapFunction<JSONObject, TradeProvinceOrderBean>() {
                    @Override
                    public TradeProvinceOrderBean map(JSONObject jsonObj) throws Exception {

                        String provinceId = jsonObj.getString("province_id");
                        String orderId = jsonObj.getString("order_id");

                        // Mutable singleton set: the reduce step merges these via addAll,
                        // so the set must support mutation (not Collections.singleton itself).
                        Set<String> orderIdSet = new HashSet<>(Collections.singleton(orderId));

                        BigDecimal finalAmount = new BigDecimal(jsonObj.getString("final_amount"));
                        // seconds -> milliseconds: the event-time timestamp assigned to
                        // the watermark must be an epoch-millis value.
                        Long ts = jsonObj.getLong("ts") * 1000L;

                        TradeProvinceOrderBean provinceOrderBean = TradeProvinceOrderBean.builder()
                                .provinceId(provinceId)
                                .orderIdSet(orderIdSet)
                                .finalAmount(finalAmount)
                                .ts(ts)
                                .build();

                        return provinceOrderBean;
                    }
                }
        );
        // For test output ->
//        orderBeanStream.print("orderBean->");

        // TODO 8. Assign watermarks and extract the event-time field
        SingleOutputStreamOperator<TradeProvinceOrderBean> withWatermarkStream = orderBeanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<TradeProvinceOrderBean>forMonotonousTimestamps()
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<TradeProvinceOrderBean>() {
                                    @Override
                                    public long extractTimestamp(TradeProvinceOrderBean element, long recordTimestamp) {
                                        return element.getTs();
                                    }
                                }
                        )
        );

        // TODO 9. Key by the aggregation dimension (province id)
        KeyedStream<TradeProvinceOrderBean, String> withProvinceIdKeyedByStream =
                withWatermarkStream.keyBy(TradeProvinceOrderBean::getProvinceId);

        // TODO 10. Open a 10-second event-time tumbling window
        WindowedStream<TradeProvinceOrderBean, String, TimeWindow> windowedStream =
                withProvinceIdKeyedByStream.window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10L)));

        // TODO 11. Aggregate inside the window
        SingleOutputStreamOperator<TradeProvinceOrderBean> reducedStream = windowedStream.reduce(
                new ReduceFunction<TradeProvinceOrderBean>() {
                    @Override
                    public TradeProvinceOrderBean reduce(TradeProvinceOrderBean value1, TradeProvinceOrderBean value2) throws Exception {
                        // Sum amounts; retraction records carry negated amounts and cancel out.
                        value1.setFinalAmount(value1.getFinalAmount().add(value2.getFinalAmount()));
                        // Merge order-id sets pairwise; duplicates collapse automatically,
                        // which yields the distinct order count.
                        value1.getOrderIdSet().addAll(value2.getOrderIdSet());
                        return value1;
                    }
                },
                new WindowFunction<TradeProvinceOrderBean, TradeProvinceOrderBean, String, TimeWindow>() {
                    @Override
                    public void apply(String key, TimeWindow window, Iterable<TradeProvinceOrderBean> input, Collector<TradeProvinceOrderBean> out) throws Exception {
                        // Stamp window start/end and the reporting date onto the result.
                        String stt = DateFormatUtil.toYmdHms(window.getStart());
                        String edt = DateFormatUtil.toYmdHms(window.getEnd());
                        String curDate = DateFormatUtil.toDate(window.getStart());
                        for (TradeProvinceOrderBean bean : input) {
                            int size = bean.getOrderIdSet().size();
                            bean.setStt(stt);
                            bean.setEdt(edt);
                            bean.setCurDate(curDate);
                            bean.setOrderCount((long) size);
                            out.collect(bean);
                        }
                    }
                }
        );
        // For test output -> yes
//        reducedStream.print("reducedStream->");

        // TODO 12. Join the province dimension (province name) asynchronously
        SingleOutputStreamOperator<TradeProvinceOrderBean> withProvinceNameStream = AsyncDataStream.unorderedWait(
                reducedStream,
                new DimAsyncFunction<TradeProvinceOrderBean>("dim_base_province") {
                    @Override
                    public String getKey(TradeProvinceOrderBean provinceOrderBean) {
                        return provinceOrderBean.getProvinceId();
                    }

                    @Override
                    public void join(TradeProvinceOrderBean provinceOrderBean, JSONObject dimInfoJsonObj) {
                        provinceOrderBean.setProvinceName(dimInfoJsonObj.getString("name"));
                    }
                },
                60L,
                TimeUnit.SECONDS
        );
        // For test output ->
        withProvinceNameStream.print("withProvinceName->");

        // TODO 13. Write the joined result to Doris
        withProvinceNameStream
                .map(new BeanToJsonStrFunction<TradeProvinceOrderBean>())
                .sinkTo(DorisUtil.getDorisSink("dws_trade_province_order_window"));

        // Execute the job
        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

    }
}
