package com.nepu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.nepu.gmall.realtime.app.func.DimAsyncFunction;
import com.nepu.gmall.realtime.bean.TradeProvinceOrderWindow;
import com.nepu.gmall.realtime.util.ClickHouseUtil;
import com.nepu.gmall.realtime.util.DateFormatUtil;
import com.nepu.gmall.realtime.util.KafkaUtils;
import com.nepu.gmall.realtime.util.TimestampLtz3CompareUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

/**
 * 交易域省份粒度下单各窗口汇总表
 * 从 Kafka 读取订单明细数据，过滤 null 数据并按照唯一键对数据去重，统计各省份各窗口订单数和订单金额，
 * 将数据写入 ClickHouse 交易域省份粒度下单各窗口汇总表。
 * （1）首先从kafka的dwd_trade_order_detail主题中读取数据
 * （2）转换数据为json格式
 * （3）对数据按照order_detail进行过滤
 * （4）转换数据结构
 * （5）对数据进行key by
 * （6）开窗聚合
 * （7）补充维度字段
 * （8）将数据输出到clickhouse
 *
 * 数据流向：
 * mock --> mysql --> maxwell --> kafka --> DwdTradeOrderPreProcess.class --> DwdTradeOrderDetail --> DwsTradeProvinceOrderWindow -->clickHouse
 * 启动的服务：
 *  mysql 、hdfs 、zookeeper、maxwell、kafka 、 redis 、hbase、 phoenix、clickhouse
 * @author chenshuaijun
 * @create 2023-03-06 13:53
 */
public class DwsTradeProvinceOrderWindow {

    public static void main(String[] args) throws Exception {

        // TODO 1. Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // In production the parallelism should match the partition count of the
        // source Kafka topic; 1 is used here for local development only.
        env.setParallelism(1);
        // Checkpointing is intentionally disabled for local runs; enable the block
        // below (exactly-once, 5-minute interval, HDFS storage) before deploying.
        /*env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)));
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/checkpoint");
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 2. Consume order-detail records from the dwd_trade_order_detail topic.
        String topic = "dwd_trade_order_detail";
        DataStreamSource<String> dwsTradeOrderDS = env.addSource(KafkaUtils.getKafkaConsumer(topic, "DwsTradeProvinceOrderWindow"));

        // TODO 3. Parse each record as JSON; malformed payloads are logged and
        // dropped (best-effort cleansing) instead of failing the job.
        SingleOutputStreamOperator<JSONObject> transformDataTypeDS = dwsTradeOrderDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                try {
                    out.collect(JSON.parseObject(value));
                } catch (Exception e) {
                    System.out.println("错误数据：" + value);
                }
            }
        });

        // TODO 4. Key by order_detail id so all versions of the same detail row
        // (retract/update duplicates from the upstream join) meet in one key group.
        KeyedStream<JSONObject, String> keyedOrderDetailIdStream = transformDataTypeDS.keyBy(json -> json.getString("id"));

        // TODO 5. Deduplicate with state + a processing-time timer: buffer the
        // latest version (by row_op_ts) of each detail and emit it 5s after the
        // first version arrives.
        SingleOutputStreamOperator<JSONObject> filterDS = keyedOrderDetailIdStream.process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {

            /** Holds the latest-seen version of this key's order detail. */
            private ValueState<JSONObject> valueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                valueState = getRuntimeContext().getState(new ValueStateDescriptor<>("last_value", JSONObject.class));
            }

            @Override
            public void processElement(JSONObject value, Context ctx, Collector<JSONObject> out) throws Exception {
                JSONObject stateValue = valueState.value();
                if (stateValue == null) {
                    // First version for this key: emit whatever is latest 5s from now.
                    valueState.update(value);
                    ctx.timerService().registerProcessingTimeTimer(ctx.timerService().currentProcessingTime() + 5000L);
                } else {
                    // Keep the record whose row_op_ts is newer (or equal).
                    String stateRowOpTs = stateValue.getString("row_op_ts");
                    String valueRowOpTs = value.getString("row_op_ts");
                    if (TimestampLtz3CompareUtil.compare(stateRowOpTs, valueRowOpTs) != 1) {
                        valueState.update(value);
                    }
                }
            }

            @Override
            public void onTimer(long timestamp, OnTimerContext ctx, Collector<JSONObject> out) throws Exception {
                // Guard against a timer firing on already-cleared state, which
                // would otherwise push a null downstream and break the map stage.
                JSONObject lastValue = valueState.value();
                if (lastValue != null) {
                    out.collect(lastValue);
                }
                valueState.clear();
            }
        });

        // TODO 6. Convert each JSON record to the JavaBean. The per-record
        // order-id set lets the window dedup-count distinct orders on merge.
        SingleOutputStreamOperator<TradeProvinceOrderWindow> jsonToBeanDS = filterDS.map(new MapFunction<JSONObject, TradeProvinceOrderWindow>() {
            @Override
            public TradeProvinceOrderWindow map(JSONObject value) throws Exception {
                HashSet<String> orderIdSet = new HashSet<>();
                orderIdSet.add(value.getString("order_id"));
                return new TradeProvinceOrderWindow("", "", value.getString("province_id"), "", 0L, orderIdSet, value.getDouble("split_total_amount"), DateFormatUtil.toTs(value.getString("create_time"), true));
            }
        });

        // TODO 7. Extract the event time (create_time) and generate watermarks,
        // tolerating 2s of out-of-orderness.
        SingleOutputStreamOperator<TradeProvinceOrderWindow> watermarkDS = jsonToBeanDS.assignTimestampsAndWatermarks(WatermarkStrategy.<TradeProvinceOrderWindow>forBoundedOutOfOrderness(Duration.ofSeconds(2)).withTimestampAssigner(new SerializableTimestampAssigner<TradeProvinceOrderWindow>() {
            @Override
            public long extractTimestamp(TradeProvinceOrderWindow element, long recordTimestamp) {
                return element.getTs();
            }
        }));

        // TODO 8. Key by province id.
        KeyedStream<TradeProvinceOrderWindow, String> keyedStream = watermarkDS.keyBy(TradeProvinceOrderWindow::getProvinceId);

        // TODO 9. 10s tumbling EVENT-time window + incremental aggregation.
        // NOTE(fix): the original used TumblingProcessingTimeWindows, which
        // ignores the watermarks assigned above and windows on wall-clock time.
        SingleOutputStreamOperator<TradeProvinceOrderWindow> reduceDs = keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .reduce(new ReduceFunction<TradeProvinceOrderWindow>() {
                    @Override
                    public TradeProvinceOrderWindow reduce(TradeProvinceOrderWindow value1, TradeProvinceOrderWindow value2) throws Exception {
                        // Merge distinct order ids and sum the split amounts.
                        value1.getOrderIdSet().addAll(value2.getOrderIdSet());
                        value1.setOrderAmount(value1.getOrderAmount() + value2.getOrderAmount());
                        return value1;
                    }
                }, new WindowFunction<TradeProvinceOrderWindow, TradeProvinceOrderWindow, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<TradeProvinceOrderWindow> input, Collector<TradeProvinceOrderWindow> out) throws Exception {
                        // ReduceFunction leaves exactly one element per window.
                        TradeProvinceOrderWindow orderWindow = input.iterator().next();
                        orderWindow.setTs(System.currentTimeMillis());
                        orderWindow.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        orderWindow.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        // Distinct order count is the size of the merged id set.
                        orderWindow.setOrderCount((long) orderWindow.getOrderIdSet().size());
                        out.collect(orderWindow);
                    }
                });

        // TODO 10. Asynchronously enrich with the province name from the
        // DIM_BASE_PROVINCE dimension table (100s timeout).
        SingleOutputStreamOperator<TradeProvinceOrderWindow> resultDS = AsyncDataStream.unorderedWait(reduceDs, new DimAsyncFunction<TradeProvinceOrderWindow>("DIM_BASE_PROVINCE") {
            @Override
            public String getKey(TradeProvinceOrderWindow input) {
                return input.getProvinceId();
            }

            @Override
            public void encapsulateBeans(TradeProvinceOrderWindow input, JSONObject dimInfo) {
                input.setProvinceName(dimInfo.getString("NAME"));
            }
        }, 100, TimeUnit.SECONDS);

        // TODO 11. Write the enriched summary rows to ClickHouse.
        resultDS.print("resultDS>>>>>>>>>>>>");
        resultDS.addSink(ClickHouseUtil.getJdbcSink("insert into dws_trade_province_order_window values(?,?,?,?,?,?,?)"));

        // TODO 12. Submit the job.
        env.execute("DwsTradeProvinceOrderWindow");

    }

}
