package com.nepu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.nepu.gmall.realtime.bean.TradeOrderBean;
import com.nepu.gmall.realtime.util.ClickHouseUtil;
import com.nepu.gmall.realtime.util.DateFormatUtil;
import com.nepu.gmall.realtime.util.KafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * 交易域下单各窗口汇总表
 * 从 Kafka 订单明细主题读取数据，对数据去重，统计当日下单独立用户数和新增下单用户数，封装为实体类，写入 ClickHouse。
 * (1) 首先从kafka的dwd_trade_order_detail主题中读取数据
 * (2) 转换数据结构，过滤掉不符合格式的数据
 * (3) 根据order_detail_id对数据进行分组
 * (4) 对数据进行去重（去重的方式是，因为我们没有用到右表的数据，所以可以只保留第一条数据即可）
 * (5) 提取事件时间生成watermark
 * (6) 根据用户id对数据进行分组
 * (7) 根据状态计算：当日下单独立用户数和新增下单用户数
 * (8) 开窗聚合
 * (9) 将数据写入到ClickHouse
 * (10) 执行程序
 *
 * 数据的流向
 * mock --> mysql --> maxwell --> kafka(topic_db) --> DwdTradeOrderPreProcess.class -> kafka --> DwdTradeOrderDetail.class -->kafka --> DwsTradeOrderWindow.class -->clickHouse
 * @author chenshuaijun
 * @create 2023-03-03 13:49
 */
public class DwsTradeOrderWindow {

    public static void main(String[] args) throws Exception {
        // TODO 1. Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // In production the parallelism would never be 1; it should match the
        // partition count of the Kafka topic consumed below.
        env.setParallelism(1);
        // Checkpoint configuration (disabled for local testing; enable in production):
        /*// Checkpoint every 5 minutes with exactly-once semantics.
        env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        // Fail a checkpoint if it takes longer than 10 minutes.
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);
        // Retain externalized checkpoints on cancellation so the job can be restored later.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // Restart strategy: at most 10 failures per day, 3 minutes between attempts.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)));
        // Minimum pause between two consecutive checkpoints.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        // Heap-based state backend.
        env.setStateBackend(new HashMapStateBackend());
        // Checkpoint storage location on HDFS.
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/checkpoint");
        // The HDFS path is only writable by the atguigu user.
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/
        // TODO 2. Consume the order-detail records from Kafka.
        String topic = "dwd_trade_order_detail";
        DataStreamSource<String> dwsTradeOrderDS = env.addSource(KafkaUtils.getKafkaConsumer(topic, "DwsTradeOrderWindow"));
        // TODO 3. Parse JSON and drop malformed records.
        SingleOutputStreamOperator<JSONObject> transformDataTypeDS = dwsTradeOrderDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    out.collect(jsonObject);
                } catch (Exception e) {
                    // Dirty data is logged and skipped instead of failing the job.
                    System.out.println("错误数据：" + value);
                }
            }
        });
        // TODO 4. Key by order_detail_id so duplicates of the same detail land on the same task.
        KeyedStream<JSONObject, String> keyByOrderDetailDS = transformDataTypeDS.keyBy(json -> json.getString("id"));
        // TODO 5. Deduplicate: keep only the first record per order_detail_id.
        // The upstream left join can emit the same detail twice; since the
        // right-table columns are not used here, the first record is sufficient.
        SingleOutputStreamOperator<JSONObject> filterOrderDetailDS = keyByOrderDetailDS.filter(new RichFilterFunction<JSONObject>() {

            // Marker state: non-null means this detail id has already been emitted.
            private ValueState<String> valueState;

            @Override
            public void open(Configuration parameters) throws Exception {

                ValueStateDescriptor<String> stateDescriptor = new ValueStateDescriptor<>("last_order", String.class);
                // Short TTL (5 s) keeps state small; duplicates arrive close together,
                // and each read/write refreshes the timer.
                StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.seconds(5))
                        .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
                        .build();
                stateDescriptor.enableTimeToLive(ttlConfig);
                valueState = getRuntimeContext().getState(stateDescriptor);
            }

            @Override
            public boolean filter(JSONObject value) throws Exception {

                // First record for this key: mark it seen and pass it through.
                String stateValue = valueState.value();
                if (stateValue == null) {
                    // Any non-null value works; the state is only a presence marker.
                    valueState.update("1");
                    return true;
                } else {
                    return false;
                }
            }
        });

        // TODO 6. Assign event-time timestamps (from create_time) and generate
        // watermarks tolerating 2 s of out-of-orderness.
        SingleOutputStreamOperator<JSONObject> watermarksDS = filterOrderDetailDS.assignTimestampsAndWatermarks(WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2)).withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
            @Override
            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                return DateFormatUtil.toTs(element.getString("create_time"), true);
            }
        }));
        // TODO 7. Key by user_id for the per-user state below.
        KeyedStream<JSONObject, String> keyedUserIDStream = watermarksDS.keyBy(json -> json.getString("user_id"));
        // TODO 8. Per-user state: compute daily unique-order-user and new-order-user flags,
        // and map each record to a TradeOrderBean.
        SingleOutputStreamOperator<TradeOrderBean> flatMapDS = keyedUserIDStream.flatMap(new RichFlatMapFunction<JSONObject, TradeOrderBean>() {

            // Last date (yyyy-MM-dd) on which this user placed an order.
            // No TTL: a null state means the user has never ordered before.
            private ValueState<String> valueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                valueState = getRuntimeContext().getState(new ValueStateDescriptor<String>("last_date", String.class));
            }

            @Override
            public void flatMap(JSONObject value, Collector<TradeOrderBean> out) throws Exception {

                // Last order date recorded for this user, if any.
                String state = valueState.value();
                // Date part of the record's create_time ("yyyy-MM-dd HH:mm:ss").
                String currentDate = value.getString("create_time").split(" ")[0];
                // 1 if this is the user's first order today, else 0.
                long orderUniqueUserCount = 0L;
                // 1 if this is the user's first order ever, else 0.
                long orderNewUserCount = 0L;

                if (state == null) {
                    // Never ordered before: counts as both a new user and a unique user.
                    orderUniqueUserCount = 1L;
                    orderNewUserCount = 1L;
                    valueState.update(currentDate);
                } else if (!state.equals(currentDate)) {
                    // First order of a new day: unique user only.
                    orderUniqueUserCount = 1L;
                    valueState.update(currentDate);
                }
                // fastjson getters return null when the field is absent or null,
                // so every numeric field must be defaulted before use.
                Double splitActivityAmount = value.getDouble("split_activity_amount");
                Double splitCouponAmount = value.getDouble("split_coupon_amount");
                Integer skuNum = value.getInteger("sku_num");
                Double orderPrice = value.getDouble("order_price");
                if (splitActivityAmount == null) {
                    splitActivityAmount = 0.0D;
                }
                if (splitCouponAmount == null) {
                    splitCouponAmount = 0.0D;
                }
                // Guard against dirty records: a null sku_num or order_price would
                // otherwise throw an NPE on unboxing and fail the whole job.
                double originalAmount = (skuNum == null || orderPrice == null) ? 0.0D : skuNum * orderPrice;
                // stt/edt are filled in by the window function; ts is set there as well.
                out.collect(new TradeOrderBean("", "", orderUniqueUserCount, orderNewUserCount, splitActivityAmount, splitCouponAmount, originalAmount, null));

            }
        });

        // TODO 9. 10-second tumbling event-time window over the whole stream,
        // incrementally reduced, then stamped with the window bounds.
        SingleOutputStreamOperator<TradeOrderBean> resultDS = flatMapDS.windowAll(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<TradeOrderBean>() {
                    @Override
                    public TradeOrderBean reduce(TradeOrderBean value1, TradeOrderBean value2) throws Exception {
                        // Sum all metrics into value1 (incremental aggregation).
                        value1.setOrderUniqueUserCount(value1.getOrderUniqueUserCount() + value2.getOrderUniqueUserCount());
                        value1.setOrderNewUserCount(value1.getOrderNewUserCount() + value2.getOrderNewUserCount());
                        value1.setOrderActivityReduceAmount(value1.getOrderActivityReduceAmount() + value2.getOrderActivityReduceAmount());
                        value1.setOrderCouponReduceAmount(value1.getOrderCouponReduceAmount() + value2.getOrderCouponReduceAmount());
                        value1.setOrderOriginalTotalAmount(value1.getOrderOriginalTotalAmount() + value2.getOrderOriginalTotalAmount());
                        return value1;
                    }
                }, new AllWindowFunction<TradeOrderBean, TradeOrderBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow window, Iterable<TradeOrderBean> values, Collector<TradeOrderBean> out) throws Exception {
                        // The reduce leaves exactly one pre-aggregated bean per window.
                        TradeOrderBean tradeOrderBean = values.iterator().next();

                        tradeOrderBean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        tradeOrderBean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        tradeOrderBean.setTs(System.currentTimeMillis());

                        out.collect(tradeOrderBean);
                    }
                });

        // TODO 10. Sink the aggregated rows to ClickHouse (print for debugging).

        resultDS.print(">>>>>>>>>>");

        resultDS.addSink(ClickHouseUtil.getJdbcSink("insert into dws_trade_order_window values(?,?,?,?,?,?,?,?)"));

        // TODO 11. Launch the job.
        env.execute("DwsTradeOrderWindow");

    }
}
