package com.nepu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.nepu.gmall.realtime.bean.CartAddUuBean;
import com.nepu.gmall.realtime.util.ClickHouseUtil;
import com.nepu.gmall.realtime.util.DateFormatUtil;
import com.nepu.gmall.realtime.util.KafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * 交易域加购各窗口汇总表
 * 从 Kafka 读取用户加购明细数据，统计每日各窗口加购独立用户数，写入 ClickHouse。
 * (1) 从kafka的dwd_trade_cart_add主题中读取出用户加购的数据
 * (2) 转换数据结构
 * (3) 提取事件时间，生成watermark
 * (4) 将数据根据user_id进行分组
 * (5) 根据状态过滤数据
 * (6) 开窗、聚合
 * (7) 将数据写入到clickhouse
 *
 * 数据的流向
 *  mock --> mysql --> maxwell --> kafka(topic_db) --> DwdTradeCartAdd.class --> kafka(dwd_trade_cart_add) --> DwsTradeCartAddUuWindow.class --> clickHouse
 * @author chenshuaijun
 * @create 2023-03-01 21:04
 */
public class DwsTradeCartAddUuWindow {

    public static void main(String[] args) throws Exception {

        // TODO 1. Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // In production the parallelism is never 1 — it should match the partition
        // count of the Kafka topic read below. 1 is used here for local testing only.
        env.setParallelism(1);
        // Checkpoint configuration (kept disabled for local runs):
        /*// Checkpoint every 5 minutes with exactly-once semantics.
        env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        // Checkpoint timeout: 10 minutes.
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);
        // Retain externalized checkpoints so a failed job can be restored from them
        // instead of the checkpoint metadata being cleaned up on cancellation.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // Restart strategy: at most 10 failures per day, 3 minutes between attempts.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)));
        // Minimum pause between two consecutive checkpoints.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        // Heap (in-memory) state backend.
        env.setStateBackend(new HashMapStateBackend());
        // Checkpoint storage path on HDFS.
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/checkpoint");
        // Only the atguigu user is allowed to write to this HDFS path.
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 2. Read cart-add detail records from the corresponding Kafka topic.
        String topic = "dwd_trade_cart_add";
        DataStreamSource<String> kafkaSourceStream = env.addSource(KafkaUtils.getKafkaConsumer(topic, "DwsTradeCartAddUuWindow"));

        // TODO 3. Parse each raw JSON string into a JSONObject.
        SingleOutputStreamOperator<JSONObject> mapDataStream = kafkaSourceStream.map(JSON::parseObject);

        // TODO 4. Extract the event time and generate watermarks
        //         (bounded out-of-orderness of 2 seconds).
        SingleOutputStreamOperator<JSONObject> waterMarkDataStream = mapDataStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                // Prefer operate_time (update of an existing cart line);
                                // fall back to create_time for newly inserted lines.
                                // NOTE(review): a record with both fields null would NPE
                                // inside DateFormatUtil — assumed not to occur upstream.
                                String operateTime = element.getString("operate_time");
                                if (operateTime != null) {
                                    return DateFormatUtil.toTs(operateTime, true);
                                }
                                return DateFormatUtil.toTs(element.getString("create_time"), true);
                            }
                        }));

        // TODO 5. Key the stream by user_id so the per-user dedup state is local.
        KeyedStream<JSONObject, String> keyedStream = waterMarkDataStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                return value.getString("user_id");
            }
        });

        // TODO 6. Use keyed state to keep only each user's FIRST cart-add per day,
        //         so downstream counts are independent (unique) users.
        SingleOutputStreamOperator<CartAddUuBean> flatMapDataStream = keyedStream.flatMap(new RichFlatMapFunction<JSONObject, CartAddUuBean>() {

            // Date (yyyy-MM-dd) of this user's last counted cart-add.
            private ValueState<String> lastAddDate;

            @Override
            public void open(Configuration parameters) throws Exception {
                // State descriptor name is kept as-is (typo included) so that
                // existing checkpoints/savepoints remain restorable.
                ValueStateDescriptor<String> stateDescriptor = new ValueStateDescriptor<>("last_car_add_data", String.class);
                // Expire the state one day after the last write: the TTL timer is
                // refreshed on create and on every update, which is all we need to
                // distinguish "today" from "a previous day".
                StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.days(1))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();
                stateDescriptor.enableTimeToLive(ttlConfig);

                lastAddDate = getRuntimeContext().getState(stateDescriptor);
            }

            @Override
            public void flatMap(JSONObject value, Collector<CartAddUuBean> out) throws Exception {
                // 1. Derive the event date: use operate_time when present,
                //    otherwise take create_time ("yyyy-MM-dd HH:mm:ss" -> date part).
                String operateTime = value.getString("operate_time");
                String createTime = value.getString("create_time");
                String curDate = (operateTime != null ? operateTime : createTime).split(" ")[0];

                // 2. Emit one bean only for the user's first cart-add of the day.
                String stateDate = lastAddDate.value();
                if (stateDate == null || !stateDate.equals(curDate)) {
                    lastAddDate.update(curDate);
                    // Window start/end/ts are filled in by the window function below.
                    out.collect(new CartAddUuBean("", "", 1L, 0L));
                }
            }
        });

        // TODO 7. Window and aggregate.
        // BUG FIX: the stream carries event-time watermarks (TODO 4), but the
        // original code used TumblingProcessingTimeWindows, which ignores those
        // watermarks and windows on wall-clock time. Switched to event-time
        // windows so the 2s out-of-orderness tolerance actually takes effect.
        SingleOutputStreamOperator<CartAddUuBean> reduceDataStream = flatMapDataStream
                .windowAll(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<CartAddUuBean>() {
                    @Override
                    public CartAddUuBean reduce(CartAddUuBean value1, CartAddUuBean value2) throws Exception {
                        // Sum the unique-user counts; the other fields are set later.
                        value1.setCartAddUuCt(value1.getCartAddUuCt() + value2.getCartAddUuCt());
                        return value1;
                    }
                }, new AllWindowFunction<CartAddUuBean, CartAddUuBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow window, Iterable<CartAddUuBean> values, Collector<CartAddUuBean> out) throws Exception {
                        // The reduce phase leaves exactly one pre-aggregated element;
                        // stamp it with the window bounds and the emission time.
                        CartAddUuBean next = values.iterator().next();
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        next.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        next.setTs(System.currentTimeMillis());
                        out.collect(next);
                    }
                });

        reduceDataStream.print(">>>>>>>>>");

        // TODO 8. Sink the windowed aggregates into ClickHouse.
        reduceDataStream.addSink(ClickHouseUtil.getJdbcSink("insert into dws_trade_cart_add_uu_window values(?,?,?,?)"));

        // TODO 9. Launch the job.
        env.execute("DwsTradeCartAddUuWindow");
    }
}
