package com.atguigu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.PropertyNamingStrategy;
import com.alibaba.fastjson.serializer.SerializeConfig;
import com.atguigu.app.func.DimAsyncFunction;
import com.atguigu.bean.TradeProvinceOrderBean_02;
import com.atguigu.common.Constant;
import com.atguigu.utils.DateFormatUtil;
import com.atguigu.utils.DorisUtil;
import com.atguigu.utils.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

//Data flow: web/app -> Nginx -> business server (MySQL) -> Maxwell -> Kafka(ODS) -> FlinkApp -> Kafka(DWD) -> FlinkApp -> Doris
//Programs : Mock -> MySQL -> Maxwell -> Kafka(ZK) -> Dwd04_TradeOrderDetail2 -> Kafka(ZK) -> Dws10_TradeProvinceOrderWindow(HDFS HBase Redis) -> Doris
public class Dws10_TradeProvinceOrderWindow_02 {

    public static void main(String[] args) throws Exception {

        //1. Set up the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //1.1 Enable checkpointing (exactly-once, stored on HDFS).
        env.enableCheckpointing(10000L);
        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        checkpointConfig.setCheckpointTimeout(20000L);
        checkpointConfig.setCheckpointStorage("hdfs://hadoop102:8020/flink-ck");
        checkpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        checkpointConfig.setMinPauseBetweenCheckpoints(5000L);
        checkpointConfig.setMaxConcurrentCheckpoints(2);
        //Default restart strategy retries Integer.MAX_VALUE times; cap at 3 attempts, 5s apart.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5000L));
        env.setStateBackend(new HashMapStateBackend());

        System.setProperty("HADOOP_USER_NAME", "atguigu");

        //2. Consume the DWD-layer order-detail topic from Kafka.
        DataStreamSource<String> kafkaDS = env.fromSource(
                KafkaUtil.getKafkaSource(Constant.TOPIC_DWD_TRADE_ORDER_DETAIL, "dws_province_order_230315"),
                WatermarkStrategy.noWatermarks(),
                "kafka-source");

        //3. Drop null/empty records, parse to JSON, and assign event-time watermarks.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                //FIX: also guard against null records (e.g. Maxwell/Kafka tombstones);
                //the original `!"".equals(value)` let nulls through into the parser.
                if (value != null && !value.isEmpty()) {
                    out.collect(JSON.parseObject(value));
                }
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                    @Override
                    public long extractTimestamp(JSONObject element, long recordTimestamp) {
                        //FIX: "create_time" is a "yyyy-MM-dd HH:mm:ss" string (it is split on a
                        //space further down), so getLong("create_time") would return null and
                        //NPE on unboxing. Convert the string to epoch millis instead.
                        //NOTE(review): assumes DateFormatUtil.toTs(dtStr, true) parses the full
                        //"yyyy-MM-dd HH:mm:ss" form — confirm against the util's signature.
                        return DateFormatUtil.toTs(element.getString("create_time"), true);
                    }
                }));

        //4. Key by order_detail_id and de-duplicate (duplicates come from the DWD left join).
        //   Strategy two: keep the FIRST record seen per detail id; a 5s-TTL value state
        //   marks ids that have already been emitted.
        SingleOutputStreamOperator<TradeProvinceOrderBean_02> tradeProvinceOrderBeanDS = jsonObjDS.keyBy(json -> json.getString("id"))
                .flatMap(new RichFlatMapFunction<JSONObject, TradeProvinceOrderBean_02>() {

                    //Non-null once this detail id has been emitted; expires after 5s.
                    private ValueState<String> valueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        StateTtlConfig ttlConfig = new StateTtlConfig.Builder(Time.seconds(5))
                                .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
                                .build();
                        ValueStateDescriptor<String> stateDescriptor = new ValueStateDescriptor<>("detail-value-state", String.class);
                        stateDescriptor.enableTimeToLive(ttlConfig);

                        valueState = getRuntimeContext().getState(stateDescriptor);
                    }

                    @Override
                    public void flatMap(JSONObject value, Collector<TradeProvinceOrderBean_02> out) throws Exception {

                        //First record for this detail id wins; later duplicates are dropped.
                        if (valueState.value() == null) {
                            //FIX: build the order-id set only when we actually emit,
                            //instead of allocating one per (possibly duplicate) record.
                            HashSet<String> orderIds = new HashSet<>();
                            orderIds.add(value.getString("order_id"));

                            out.collect(new TradeProvinceOrderBean_02("", "",
                                    value.getString("province_id"),
                                    "",
                                    value.getString("create_time").split(" ")[0],
                                    0L,
                                    value.getBigDecimal("split_total_amount"),
                                    orderIds));
                            valueState.update("1");
                        }
                    }
                });

        //5. Key by province, open 10s tumbling event-time windows, and aggregate:
        //   union the order-id sets (distinct order count) and sum the amounts.
        SingleOutputStreamOperator<TradeProvinceOrderBean_02> reduceDS = tradeProvinceOrderBeanDS.keyBy(TradeProvinceOrderBean_02::getProvinceId)
                .window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<TradeProvinceOrderBean_02>() {
                    @Override
                    public TradeProvinceOrderBean_02 reduce(TradeProvinceOrderBean_02 value1, TradeProvinceOrderBean_02 value2) throws Exception {
                        value1.getOrderIds().addAll(value2.getOrderIds());
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                        return value1;
                    }
                }, new WindowFunction<TradeProvinceOrderBean_02, TradeProvinceOrderBean_02, String, TimeWindow>() {
                    @Override
                    public void apply(String key, TimeWindow window, Iterable<TradeProvinceOrderBean_02> input, Collector<TradeProvinceOrderBean_02> out) throws Exception {
                        //The reduce leaves exactly one pre-aggregated element per window.
                        TradeProvinceOrderBean_02 next = input.iterator().next();
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        next.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        //Distinct order count = size of the unioned order-id set.
                        next.setOrderCount((long) next.getOrderIds().size());
                        out.collect(next);
                    }
                });

        //6. Asynchronously enrich each window result with the province name
        //   from the dim_base_province dimension table (100s timeout).
        SingleOutputStreamOperator<TradeProvinceOrderBean_02> resultDS = AsyncDataStream.unorderedWait(reduceDS,
                new DimAsyncFunction<TradeProvinceOrderBean_02>("dim_base_province") {
                    @Override
                    public String getKey(TradeProvinceOrderBean_02 input) {
                        return input.getProvinceId();
                    }

                    @Override
                    public void join(TradeProvinceOrderBean_02 input, JSONObject dimInfo) {
                        input.setProvinceName(dimInfo.getString("name"));
                    }
                }, 100, TimeUnit.SECONDS);

        //7. Serialize as snake_case JSON and write out to Doris.
        resultDS.print("resultDS>>>>>>>>");
        resultDS.map(bean -> {
                    //SerializeConfig is created per record on purpose: hoisting it into the
                    //lambda closure would require Flink to serialize it with the operator,
                    //and fastjson's SerializeConfig is presumably not Serializable — verify
                    //before hoisting.
                    SerializeConfig config = new SerializeConfig();
                    //Bean properties become snake_case keys to match the Doris columns.
                    config.propertyNamingStrategy = PropertyNamingStrategy.SnakeCase;
                    return JSON.toJSONString(bean, config);
                })
                .sinkTo(DorisUtil.getDorisSink("dws_trade_province_order_window"));

        //8. Launch the job. FIX: name matches this class (_02 variant) so it is
        //   distinguishable from the original Dws10 job in the Flink UI.
        env.execute("Dws10_TradeProvinceOrderWindow_02");

    }

}
