package com.nepu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.nepu.gmall.realtime.app.func.DimAsyncFunction;
import com.nepu.gmall.realtime.bean.TradeUserSpuOrderBean;
import com.nepu.gmall.realtime.util.ClickHouseUtil;
import com.nepu.gmall.realtime.util.DateFormatUtil;
import com.nepu.gmall.realtime.util.KafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.*;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import scala.Tuple4;

import java.time.Duration;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

/**
 * 交易域用户-SPU粒度下单各窗口汇总表
 * 从 Kafka 订单明细主题读取数据，过滤 null 数据并按照唯一键对数据去重，
 * 关联维度信息，按照维度分组，统计各维度各窗口的订单数和订单金额，
 * 将数据写入 ClickHouse 交易域品牌-品类-用户-SPU粒度下单各窗口汇总表。
 * 
 * （1）首先读取kafka的dwd_trade_order_detail主题的数据
 * （2）转换数据为json格式，并且过滤掉不符合规则的数据
 * （3）将数据按照order_detail_id进行分组，去除掉重复的数据
 * （4）根据sku_id去关联维表
 * （5）提取事件时间生成watermark
 * （6）对数据进行分组
 * （7）开窗聚合
 * （8）将数据写出到ClickHouse
 * （9）执行
 * 
 * 数据的流向
 *                                                                                                            DimSinkApp.class -->phoenix \
 * mock --> mysql --> maxwell --> kafka --> DwdTradeOrderPreProcess.class --> kafka --> DwdTradeOrderDetail.class --> DwsTradeUserSpuOrderWindow.class --> clickHouse
 * @author chenshuaijun
 * @create 2023-03-03 19:14
 */
public class DwsTradeUserSpuOrderWindow {

    public static void main(String[] args) throws Exception {

        // TODO 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // In production the parallelism should match the partition count of the source
        // Kafka topic; 1 is used here only for local debugging.
        env.setParallelism(1);

        // Checkpoint / state-backend settings, kept for production use
        // (commented out while running locally):
        /*// 5-minute checkpoint interval, exactly-once semantics.
        env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        // 10-minute checkpoint timeout.
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);
        // Retain externalized checkpoints so a cancelled/failed job can be restored.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // Failure-rate restart strategy: at most 10 failures per day, 3 minutes apart.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)));
        // Minimum pause between two checkpoints.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        // In-memory (hash map) state backend.
        env.setStateBackend(new HashMapStateBackend());
        // Checkpoint storage path on HDFS.
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/checkpoint");
        // Only the atguigu user is allowed to write to this HDFS path.
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 2. Consume the order-detail fact stream from Kafka.
        String topic = "dwd_trade_order_detail";
        DataStreamSource<String> dwsTradeOrderDS =
                env.addSource(KafkaUtils.getKafkaConsumer(topic, "DwsTradeOrderWindow"));

        // TODO 3. Parse each record to JSON; malformed records are logged and dropped.
        SingleOutputStreamOperator<JSONObject> transformDataTypeDS = dwsTradeOrderDS.flatMap(
                new FlatMapFunction<String, JSONObject>() {
                    @Override
                    public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                        try {
                            out.collect(JSON.parseObject(value));
                        } catch (Exception e) {
                            System.out.println("错误数据：" + value);
                        }
                    }
                });

        // TODO 4. Key by order_detail_id so duplicates of one detail meet in one subtask.
        KeyedStream<JSONObject, String> keyByOrderDetailDS =
                transformDataTypeDS.keyBy(json -> json.getString("id"));

        // TODO 5. Deduplicate: keep only the first record seen per order_detail_id.
        // The per-key marker state carries a 5 s TTL so it does not grow unbounded.
        SingleOutputStreamOperator<JSONObject> filterOrderDetailDS =
                keyByOrderDetailDS.filter(new RichFilterFunction<JSONObject>() {

                    private ValueState<String> seenState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        ValueStateDescriptor<String> stateDescriptor =
                                new ValueStateDescriptor<>("last_order", String.class);
                        StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.seconds(5))
                                .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
                                .build();
                        stateDescriptor.enableTimeToLive(ttlConfig);
                        seenState = getRuntimeContext().getState(stateDescriptor);
                    }

                    @Override
                    public boolean filter(JSONObject value) throws Exception {
                        if (seenState.value() == null) {
                            // Any non-null marker works; we only test for presence.
                            seenState.update("1");
                            return true;
                        }
                        return false;
                    }
                });

        // TODO 6. Map the raw JSON to the summary bean. The order id is stored in a set
        // so distinct orders can be counted after the window reduce merges the sets.
        SingleOutputStreamOperator<TradeUserSpuOrderBean> transformTypeDs =
                filterOrderDetailDS.map(new MapFunction<JSONObject, TradeUserSpuOrderBean>() {
                    @Override
                    public TradeUserSpuOrderBean map(JSONObject value) throws Exception {
                        HashSet<String> orderIds = new HashSet<>();
                        orderIds.add(value.getString("order_id"));
                        return TradeUserSpuOrderBean.builder()
                                .skuId(value.getString("sku_id"))
                                .userId(value.getString("user_id"))
                                .orderIdSet(orderIds)
                                .orderAmount(value.getDouble("split_total_amount"))
                                .ts(DateFormatUtil.toTs(value.getString("create_time"), true))
                                .build();
                    }
                });

        // TODO 7. Enrich with the sku dimension (spu_id / category3_id / tm_id) via async IO.
        SingleOutputStreamOperator<TradeUserSpuOrderBean> tradeUserSpuDS = AsyncDataStream.unorderedWait(
                transformTypeDs,
                new DimAsyncFunction<TradeUserSpuOrderBean>("DIM_SKU_INFO") {
                    @Override
                    public String getKey(TradeUserSpuOrderBean input) {
                        return input.getSkuId();
                    }

                    @Override
                    public void encapsulateBeans(TradeUserSpuOrderBean input, JSONObject dimInfo) {
                        input.setSpuId(dimInfo.getString("SPU_ID"));
                        input.setCategory3Id(dimInfo.getString("CATEGORY3_ID"));
                        input.setTrademarkId(dimInfo.getString("TM_ID"));
                    }
                }, 100, TimeUnit.SECONDS);

        tradeUserSpuDS.print("tradeUserSpuDS>>>>>>>>>");

        // TODO 8. Extract the event time and generate watermarks (2 s out-of-orderness).
        SingleOutputStreamOperator<TradeUserSpuOrderBean> tradeUserSpuOrderWatermarkDS =
                tradeUserSpuDS.assignTimestampsAndWatermarks(
                        WatermarkStrategy.<TradeUserSpuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                                .withTimestampAssigner(new SerializableTimestampAssigner<TradeUserSpuOrderBean>() {
                                    @Override
                                    public long extractTimestamp(TradeUserSpuOrderBean element, long recordTimestamp) {
                                        return element.getTs();
                                    }
                                }));

        // TODO 9. Key by (userId, spuId, trademarkId, category3Id), open 10 s tumbling
        // event-time windows and aggregate.
        // BUGFIX: the original used TumblingProcessingTimeWindows, which silently ignored
        // the watermarks generated in step 8 and made window contents wall-clock dependent;
        // event-time windows match the documented design.
        SingleOutputStreamOperator<TradeUserSpuOrderBean> reduceDS = tradeUserSpuOrderWatermarkDS
                .keyBy(new KeySelector<TradeUserSpuOrderBean, Tuple4<String, String, String, String>>() {
                    @Override
                    public Tuple4<String, String, String, String> getKey(TradeUserSpuOrderBean value) throws Exception {
                        return Tuple4.apply(value.getUserId(), value.getSpuId(), value.getTrademarkId(), value.getCategory3Id());
                    }
                })
                .window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<TradeUserSpuOrderBean>() {
                    @Override
                    public TradeUserSpuOrderBean reduce(TradeUserSpuOrderBean value1, TradeUserSpuOrderBean value2) throws Exception {
                        // Merge order-id sets (distinct count) and sum the amounts.
                        value1.getOrderIdSet().addAll(value2.getOrderIdSet());
                        value1.setOrderAmount(value1.getOrderAmount() + value2.getOrderAmount());
                        return value1;
                    }
                }, new WindowFunction<TradeUserSpuOrderBean, TradeUserSpuOrderBean, Tuple4<String, String, String, String>, TimeWindow>() {
                    @Override
                    public void apply(Tuple4<String, String, String, String> key, TimeWindow window, Iterable<TradeUserSpuOrderBean> input, Collector<TradeUserSpuOrderBean> out) throws Exception {
                        // Exactly one pre-reduced element per key/window.
                        TradeUserSpuOrderBean bean = input.iterator().next();
                        bean.setTs(System.currentTimeMillis());
                        bean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        bean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        bean.setOrderCount((long) bean.getOrderIdSet().size());
                        out.collect(bean);
                    }
                });

        // TODO 10. Join spu_info for the spu name.
        SingleOutputStreamOperator<TradeUserSpuOrderBean> resultDSJoinSpu = AsyncDataStream.unorderedWait(
                reduceDS,
                new DimAsyncFunction<TradeUserSpuOrderBean>("DIM_SPU_INFO") {
                    @Override
                    public String getKey(TradeUserSpuOrderBean input) {
                        return input.getSpuId();
                    }

                    @Override
                    public void encapsulateBeans(TradeUserSpuOrderBean input, JSONObject dimInfo) {
                        input.setSpuName(dimInfo.getString("SPU_NAME"));
                    }
                }, 100, TimeUnit.SECONDS);

        // TODO 11. Join base_trademark for the trademark name.
        SingleOutputStreamOperator<TradeUserSpuOrderBean> resultDSJoinTm = AsyncDataStream.unorderedWait(
                resultDSJoinSpu,
                new DimAsyncFunction<TradeUserSpuOrderBean>("DIM_BASE_TRADEMARK") {
                    @Override
                    public String getKey(TradeUserSpuOrderBean input) {
                        return input.getTrademarkId();
                    }

                    @Override
                    public void encapsulateBeans(TradeUserSpuOrderBean input, JSONObject dimInfo) {
                        input.setTrademarkName(dimInfo.getString("TM_NAME"));
                    }
                }, 100, TimeUnit.SECONDS);

        // TODO 12. Join base_category3 for the level-3 category name and its parent id.
        SingleOutputStreamOperator<TradeUserSpuOrderBean> resultDSJoinCtThree = AsyncDataStream.unorderedWait(
                resultDSJoinTm,
                new DimAsyncFunction<TradeUserSpuOrderBean>("DIM_BASE_CATEGORY3") {
                    @Override
                    public String getKey(TradeUserSpuOrderBean input) {
                        return input.getCategory3Id();
                    }

                    @Override
                    public void encapsulateBeans(TradeUserSpuOrderBean input, JSONObject dimInfo) {
                        input.setCategory3Name(dimInfo.getString("NAME"));
                        input.setCategory2Id(dimInfo.getString("CATEGORY2_ID"));
                    }
                }, 100, TimeUnit.SECONDS);

        // TODO 13. Join base_category2 for the level-2 category name and its parent id.
        SingleOutputStreamOperator<TradeUserSpuOrderBean> resultDSJoinCtTwo = AsyncDataStream.unorderedWait(
                resultDSJoinCtThree,
                new DimAsyncFunction<TradeUserSpuOrderBean>("DIM_BASE_CATEGORY2") {
                    @Override
                    public String getKey(TradeUserSpuOrderBean input) {
                        return input.getCategory2Id();
                    }

                    @Override
                    public void encapsulateBeans(TradeUserSpuOrderBean input, JSONObject dimInfo) {
                        input.setCategory2Name(dimInfo.getString("NAME"));
                        input.setCategory1Id(dimInfo.getString("CATEGORY1_ID"));
                    }
                }, 100, TimeUnit.SECONDS);

        // TODO 14. Join base_category1 for the level-1 category name.
        SingleOutputStreamOperator<TradeUserSpuOrderBean> resultDSJoinCtOne = AsyncDataStream.unorderedWait(
                resultDSJoinCtTwo,
                new DimAsyncFunction<TradeUserSpuOrderBean>("DIM_BASE_CATEGORY1") {
                    @Override
                    public String getKey(TradeUserSpuOrderBean input) {
                        return input.getCategory1Id();
                    }

                    @Override
                    public void encapsulateBeans(TradeUserSpuOrderBean input, JSONObject dimInfo) {
                        input.setCategory1Name(dimInfo.getString("NAME"));
                    }
                }, 100, TimeUnit.SECONDS);

        // TODO 15. Sink the fully enriched window summaries to ClickHouse.
        resultDSJoinCtOne.print("resultDSJoinCtOne>>>>>>>>>>>>>");
        resultDSJoinCtOne.addSink(ClickHouseUtil.getJdbcSink("insert into dws_trade_user_spu_order_window values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"));

        // TODO 16. Launch the job.
        env.execute("DwsTradeUserSpuOrderWindow");
    }
}
