package com.atguigu.realtime.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.realtime.beans.TradeProvinceOrderBean;
import com.atguigu.realtime.func.BeanToJsonStrMapFcuntion;
import com.atguigu.realtime.func.DimAsyncFunction;
import com.atguigu.realtime.utils.DateFormatUtil;
import com.atguigu.realtime.utils.DorisUtil;
import com.atguigu.realtime.utils.MyKafkaUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.async.AsyncFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.omg.IOP.ENCODING_CDR_ENCAPS;

import java.math.BigDecimal;
import java.util.Collections;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

/**
 * DWS-layer job: consumes order-detail facts from Kafka, de-duplicates them via
 * keyed state with retraction records, aggregates order count and amount per
 * province over 10-second event-time tumbling windows, enriches the result with
 * the province dimension, and writes it to Doris.
 *
 * @author: 洛尘
 * @since: 2023-10-16 09:24
 * @description: province-granularity trade order window aggregation (DWS)
 **/
public class DwsTradeProvinceOrderWindow {

    public static void main(String[] args) throws Exception {
        // --- Environment setup ---
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism should match the partition count of the source topic
        env.setParallelism(4);
        // Exactly-once checkpointing every 5 seconds
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // No restarts while developing so failures surface immediately
        env.setRestartStrategy(RestartStrategies.noRestart());
       /*
        // Production settings kept for reference:
        // Network buffer flush timeout
        env.setBufferTimeout(60000l);
        // Whether externalized checkpoints survive job cancellation
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.NO_EXTERNALIZED_CHECKPOINTS);
        // Minimum pause between two consecutive checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // Restart strategy: at most 3 failures per 30 days, 3s delay between attempts
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
        // State backend and remote checkpoint storage
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop105:8020/gmall/ck");
        // HDFS user used to write checkpoint files
        System.setProperty("HADOOP_USER_NAME","atguigu");*/

        // --- Read order-detail facts from Kafka ---
        String topic = "dwd_trade_order_detail";
        String groupId = "dws_trade_order_province_group";
        KafkaSource<String> kafkaSource = MyKafkaUtil.getKafkaSource(topic, groupId);
        SingleOutputStreamOperator<String> kafkaDS =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "dws_trade_order_province");

        // Convert jsonStr -> JSONObject, dropping null/empty (tombstone) messages
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String jsonStr, Context context, Collector<JSONObject> collector) throws Exception {
                        if (StringUtils.isNotEmpty(jsonStr)) {
                            collector.collect(JSONObject.parseObject(jsonStr));
                        }
                    }
                }
        );

        // --- De-duplication via state + retraction ---
        // Key by the order-detail id so all versions of one record meet in the same task
        KeyedStream<JSONObject, String> keyedByIdDS =
                jsonObjDS.keyBy(jsonObject -> jsonObject.getString("id"));
        // When a repeated id arrives, first emit a retraction record whose amount is
        // negated, then emit the latest version. Downstream sums therefore converge
        // to the value carried by the last version of each record.
        SingleOutputStreamOperator<JSONObject> distinctDS = keyedByIdDS.process(
                new KeyedProcessFunction<String, JSONObject, JSONObject>() {
                    // Last record seen for the current key
                    private ValueState<JSONObject> lastValueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        // NOTE: descriptor name "laseValueState" (typo) is kept as-is so
                        // existing savepoints/checkpoints can still be restored.
                        ValueStateDescriptor<JSONObject> valueStateDescriptor =
                                new ValueStateDescriptor<>("laseValueState", JSONObject.class);
                        // Duplicates arrive close together, so a short TTL bounds state size
                        valueStateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(Time.seconds(10)).build());
                        this.lastValueState = getRuntimeContext().getState(valueStateDescriptor);
                    }

                    @Override
                    public void processElement(JSONObject jsonObject, Context context, Collector<JSONObject> collector) throws Exception {
                        JSONObject lastValueObj = lastValueState.value();
                        if (lastValueObj != null) {
                            // Emit a cancelling record with the previous amount negated
                            String amount = lastValueObj.getString("split_total_amount");
                            lastValueObj.put("split_total_amount", "-" + amount);
                            collector.collect(lastValueObj);
                        }
                        lastValueState.update(jsonObject);
                        collector.collect(jsonObject);
                    }
                }
        );

        // Map each JSON record to the province statistics bean
        SingleOutputStreamOperator<TradeProvinceOrderBean> orderBeanDS = distinctDS.map(
                new MapFunction<JSONObject, TradeProvinceOrderBean>() {
                    @Override
                    public TradeProvinceOrderBean map(JSONObject jsonObject) throws Exception {
                        String provinceId = jsonObject.getString("province_id");
                        String splitTotalAmount = jsonObject.getString("split_total_amount");
                        // Source ts is in seconds; Flink event time expects milliseconds
                        Long ts = jsonObject.getLong("ts") * 1000;
                        String orderId = jsonObject.getString("order_id");
                        return TradeProvinceOrderBean.builder()
                                .provinceId(provinceId)
                                .orderAmount(new BigDecimal(splitTotalAmount))
                                // Mutable, typed set so the window reduce can merge order ids
                                .orderIdSet(new HashSet<>(Collections.singleton(orderId)))
                                .ts(ts)
                                .build();
                    }
                }
        );

        // Assign event-time timestamps; upstream emits ts monotonically per partition
        SingleOutputStreamOperator<TradeProvinceOrderBean> withWatermarkDS = orderBeanDS.assignTimestampsAndWatermarks(
                WatermarkStrategy.<TradeProvinceOrderBean>forMonotonousTimestamps()
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<TradeProvinceOrderBean>() {
                                    @Override
                                    public long extractTimestamp(TradeProvinceOrderBean orderBean, long recordTimestamp) {
                                        return orderBean.getTs();
                                    }
                                }
                        )
        );

        // --- Windowed aggregation per province ---
        // Group by the statistics dimension: province id
        KeyedStream<TradeProvinceOrderBean, String> keyedByProvinceDS =
                withWatermarkDS.keyBy(TradeProvinceOrderBean::getProvinceId);
        // 10-second event-time tumbling window
        WindowedStream<TradeProvinceOrderBean, String, TimeWindow> windowDS = keyedByProvinceDS.window(
                TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)));
        // Incremental reduce pre-aggregates; the window function stamps window metadata
        SingleOutputStreamOperator<TradeProvinceOrderBean> reduceDS = windowDS.reduce(
                new ReduceFunction<TradeProvinceOrderBean>() {
                    @Override
                    public TradeProvinceOrderBean reduce(TradeProvinceOrderBean value1, TradeProvinceOrderBean value2) throws Exception {
                        // Union order ids (drives the distinct order count) and sum amounts;
                        // retraction records carry negative amounts and cancel out here.
                        value1.getOrderIdSet().addAll(value2.getOrderIdSet());
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeProvinceOrderBean, TradeProvinceOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String provinceId, Context context, Iterable<TradeProvinceOrderBean> iterable, Collector<TradeProvinceOrderBean> collector) throws Exception {
                        String stt = DateFormatUtil.toYmdHms(context.window().getStart());
                        String edt = DateFormatUtil.toYmdHms(context.window().getEnd());
                        String curDate = DateFormatUtil.toDate(context.window().getStart());
                        // With a reduce pre-aggregation the iterable holds exactly one element
                        for (TradeProvinceOrderBean orderBean : iterable) {
                            orderBean.setStt(stt);
                            orderBean.setEdt(edt);
                            orderBean.setCurDate(curDate);
                            orderBean.setOrderCount((long) orderBean.getOrderIdSet().size());
                            collector.collect(orderBean);
                        }
                    }
                }
        );

        // --- Enrich with the province dimension via async lookup ---
        SingleOutputStreamOperator<TradeProvinceOrderBean> withProvinceDS = AsyncDataStream.unorderedWait(
                reduceDS,
                new DimAsyncFunction<TradeProvinceOrderBean>("dim_base_province") {
                    @Override
                    public void join(TradeProvinceOrderBean orderBean, JSONObject dimJsonObj) {
                        orderBean.setProvinceName(dimJsonObj.getString("name"));
                    }

                    @Override
                    public String getKey(TradeProvinceOrderBean orderBean) {
                        return orderBean.getProvinceId();
                    }
                },
                60, TimeUnit.SECONDS
        );
        withProvinceDS.print(">>>>");

        // --- Sink the aggregated result to the Doris DWS table ---
        withProvinceDS
                .map(new BeanToJsonStrMapFcuntion<>())
                .sinkTo(DorisUtil.getDorisSink("dws_trade_province_order_window"));

        env.execute();
    }
}